index
int64
0
0
repo_id
stringlengths
26
205
file_path
stringlengths
51
246
content
stringlengths
8
433k
__index_level_0__
int64
0
10k
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/DocumentDbMainTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.ParseException; import org.bson.BsonDocument; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironment; import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironmentFactory; import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.UUID; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; import static 
com.google.common.base.Strings.isNullOrEmpty; import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.USER_HOME_PROPERTY; import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperty.SCHEMA_NAME; import static software.amazon.documentdb.jdbc.DocumentDbMain.COMPLETE_OPTIONS; import static software.amazon.documentdb.jdbc.DocumentDbMain.tryGetConnectionProperties; import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.DEFAULT_SCHEMA_NAME; import static software.amazon.documentdb.jdbc.persist.DocumentDbSchemaReader.TABLE_SCHEMA_COLLECTION; class DocumentDbMainTest { // Ensure custom schema can be sorted after "_default" - so start with a lower-case letter private static final String CUSTOM_SCHEMA_NAME = "a" + UUID.randomUUID(); public static final String NEW_DEFAULT_SCHEMA_ANY_VERSION_REGEX = Pattern.quote("New schema '_default', version '") + "\\d+" + Pattern.quote("' generated."); public static final Pattern NEW_DEFAULT_SCHEMA_ANY_VERSION_PATTERN = Pattern .compile(NEW_DEFAULT_SCHEMA_ANY_VERSION_REGEX); private DocumentDbConnectionProperties properties; public static final Path USER_HOME_PATH = Paths.get(System.getProperty(USER_HOME_PROPERTY)); private static Stream<DocumentDbTestEnvironment> getTestEnvironments() { return DocumentDbTestEnvironmentFactory.getConfiguredEnvironments().stream(); } @BeforeAll static void beforeAll() throws Exception { for (DocumentDbTestEnvironment environment : getTestEnvironments() .collect(Collectors.toList())) { environment.start(); } } @AfterEach void afterEach() { if (properties != null) { try (DocumentDbSchemaWriter writer = new DocumentDbSchemaWriter(properties, null)) { writer.remove(DEFAULT_SCHEMA_NAME); writer.remove(CUSTOM_SCHEMA_NAME); } } properties = null; } @AfterAll static void afterAll() throws Exception { for (DocumentDbTestEnvironment environment : getTestEnvironments() .collect(Collectors.toList())) { environment.stop(); } } @DisplayName("Tests empty command line 
with no options provided.") @Test void testEmptyCommandLine() throws SQLException { final StringBuilder output = new StringBuilder(); DocumentDbMain.handleCommandLine(new String[] {}, output); Assertions.assertEquals( "Missing required options: [-g Generates a new schema for the database. This will have the effect of replacing an existing schema of the same name, if it exists., -r Removes the schema from storage for schema given by -m <schema-name>, or for schema '_default', if not provided., -l Lists the schema names, version and table names available in the schema repository., -b Lists the SQL table names in a schema., -e Exports the schema to for SQL tables named [<table-name>[,<table-name>[…]]]. If no <table-name> are given, all table schema will be exported. By default, the schema is written to stdout. Use the --output option to write to a file. The output format is JSON., -i Imports the schema from <file-name> in your home directory. The schema will be imported using the <schema-name> and a new version will be added - replacing the existing schema. The expected input format is JSON.], s, d, u\n" + "usage: " + DocumentDbMain.getLibraryName() + " [-g | -r | -l | -b | -e <[table-name[,...]]> |\n" + " -i <file-name>] -s <host-name> -d\n" + " <database-name> -u <user-name> [-p <password>]\n" + " [-n <schema-name>] [-m <method>] [-x\n" + " <max-documents>] [-t] [-a] [-o <file-name>]\n" + " [-h] [--version]\n" + " -a,--tls-allow-invalid-hostnames The indicator of whether to allow invalid\n" + " hostnames when connecting to DocumentDB.\n" + " Default: false.\n" + " -b,--list-tables Lists the SQL table names in a schema.\n" + " -d,--database <database-name> The name of the database for the schema\n" + " operations. Required.\n" + " -e,--export <[table-name[,...]]> Exports the schema to for SQL tables named\n" + " [<table-name>[,<table-name>[…]]]. If no\n" + " <table-name> are given, all table schema will\n" + " be exported. 
By default, the schema is\n" + " written to stdout. Use the --output option to\n" + " write to a file. The output format is JSON.\n" + " -g,--generate-new Generates a new schema for the database. This\n" + " will have the effect of replacing an existing\n" + " schema of the same name, if it exists.\n" + " -h,--help Prints the command line syntax.\n" + " -i,--import <file-name> Imports the schema from <file-name> in your\n" + " home directory. The schema will be imported\n" + " using the <schema-name> and a new version\n" + " will be added - replacing the existing\n" + " schema. The expected input format is JSON.\n" + " -l,--list-schema Lists the schema names, version and table\n" + " names available in the schema repository.\n" + " -m,--scan-method <method> The scan method to sample documents from the\n" + " collections. One of: random, idForward,\n" + " idReverse, or all. Used in conjunction with\n" + " the --generate-new command. Default: random.\n" + " -n,--schema-name <schema-name> The name of the schema. Default: _default.\n" + " -o,--output <file-name> Write the exported schema to <file-name> in\n" + " your home directory (instead of stdout). This\n" + " will overwrite any existing file with the\n" + " same name\n" + " -p,--password <password> The password for the user performing the\n" + " schema operations. Optional. If this option\n" + " is not provided, the end-user will be\n" + " prompted to enter the password directly.\n" + " -r,--remove Removes the schema from storage for schema\n" + " given by -m <schema-name>, or for schema\n" + " '_default', if not provided.\n" + " -s,--server <host-name> The hostname and optional port number\n" + " (default: 27017) in the format\n" + " hostname[:port]. Required.\n" + " -t,--tls The indicator of whether to use TLS\n" + " encryption when connecting to DocumentDB.\n" + " Default: false.\n" + " -u,--user <user-name> The name of the user performing the schema\n" + " operations. Required. 
Note: the user will\n" + " require readWrite role on the <database-name>\n" + " where the schema are stored if creating or\n" + " modifying schema.\n" + " --version Prints the version number of the command.\n" + " -x,--scan-limit <max-documents> The maximum number of documents to sample in\n" + " each collection. Used in conjunction with the\n" + " --generate-new command. Default: 1000.\n", output.toString().replaceAll("\r\n", "\n")); } @Test() @DisplayName("Tests short option names for minimum set of options.") void testMinimum() throws ParseException { final String password = UUID.randomUUID().toString(); final String[] args = new String[]{ "-g", "-s", "localhost", "-d", "database", "-u", "user", "-p", password }; final StringBuilder output = new StringBuilder(); final CommandLineParser parser = new DefaultParser(); final CommandLine commandLine = parser.parse(COMPLETE_OPTIONS, args); final DocumentDbConnectionProperties newProperties = new DocumentDbConnectionProperties(); Assertions.assertTrue(tryGetConnectionProperties(commandLine, newProperties, output)); Assertions.assertEquals("localhost", newProperties.getHostname()); Assertions.assertEquals("database", newProperties.getDatabase()); Assertions.assertEquals("user", newProperties.getUser()); Assertions.assertEquals(SCHEMA_NAME.getDefaultValue(), newProperties.getSchemaName()); Assertions.assertFalse(newProperties.getTlsEnabled()); Assertions.assertFalse(newProperties.getTlsAllowInvalidHostnames()); } @Test() @DisplayName("Tests minimum long version option names") void testMinimumLongArgs() throws ParseException { final String password = UUID.randomUUID().toString(); final String[] args = new String[] { "--generate-new", "--server", "localhost", "--database", "database", "--user", "user", "--password", password }; final CommandLineParser parser = new DefaultParser(); final CommandLine commandLine = parser.parse(COMPLETE_OPTIONS, args); final DocumentDbConnectionProperties newProperties = new 
DocumentDbConnectionProperties(); final StringBuilder output = new StringBuilder(); Assertions.assertTrue(tryGetConnectionProperties(commandLine, newProperties, output)); Assertions.assertEquals("localhost", newProperties.getHostname()); Assertions.assertEquals("database", newProperties.getDatabase()); Assertions.assertEquals("user", newProperties.getUser()); Assertions.assertEquals(SCHEMA_NAME.getDefaultValue(), newProperties.getSchemaName()); Assertions.assertFalse(newProperties.getTlsEnabled()); Assertions.assertFalse(newProperties.getTlsAllowInvalidHostnames()); } @Test() @DisplayName("Tests long option name using assignment for arguments") void testMinimumAssignedLong() throws ParseException { final String password = UUID.randomUUID().toString(); final String[] args = new String[] { "--generate-new", "--server=localhost", "--database=database", "--user=user", "--password=" + password }; final CommandLineParser parser = new DefaultParser(); final CommandLine commandLine = parser.parse(COMPLETE_OPTIONS, args); final DocumentDbConnectionProperties newProperties = new DocumentDbConnectionProperties(); final StringBuilder output = new StringBuilder(); Assertions.assertTrue(tryGetConnectionProperties(commandLine, newProperties, output)); Assertions.assertEquals("localhost", newProperties.getHostname()); Assertions.assertEquals("database", newProperties.getDatabase()); Assertions.assertEquals("user", newProperties.getUser()); Assertions.assertEquals(SCHEMA_NAME.getDefaultValue(), newProperties.getSchemaName()); Assertions.assertFalse(newProperties.getTlsEnabled()); Assertions.assertFalse(newProperties.getTlsAllowInvalidHostnames()); } @ParameterizedTest(name = "testGenerateNew - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testGenerateNew(final DocumentDbTestEnvironment testEnvironment) throws ParseException, SQLException { setConnectionProperties(testEnvironment); final String[] args = buildArguments("-g", CUSTOM_SCHEMA_NAME); final 
CommandLineParser parser = new DefaultParser(); final CommandLine commandLine = parser.parse(COMPLETE_OPTIONS, args); final DocumentDbConnectionProperties newProperties = new DocumentDbConnectionProperties(); final StringBuilder output = new StringBuilder(); Assertions.assertTrue(tryGetConnectionProperties(commandLine, newProperties, output)); Assertions.assertEquals(properties.getHostname(), newProperties.getHostname()); Assertions.assertEquals(properties.getDatabase(), newProperties.getDatabase()); Assertions.assertEquals(properties.getUser(), newProperties.getUser()); Assertions.assertEquals(CUSTOM_SCHEMA_NAME, newProperties.getSchemaName()); Assertions.assertEquals(properties.getTlsEnabled(), newProperties.getTlsEnabled()); Assertions.assertEquals(properties.getTlsAllowInvalidHostnames(), newProperties.getTlsAllowInvalidHostnames()); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(String.format("New schema '%s', version '1' generated.", CUSTOM_SCHEMA_NAME), output.toString()); } @ParameterizedTest(name = "testRemove - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testRemove(final DocumentDbTestEnvironment testEnvironment) throws ParseException, SQLException { setConnectionProperties(testEnvironment); final String collectionName1 = createSimpleCollection(testEnvironment); final String collectionName2 = createSimpleCollection(testEnvironment); try { final StringBuilder output = new StringBuilder(); String[] args = buildArguments("-g", CUSTOM_SCHEMA_NAME); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals( String.format("New schema '%s', version '1' generated.", CUSTOM_SCHEMA_NAME), output.toString()); args = buildArguments("-r", CUSTOM_SCHEMA_NAME); final CommandLineParser parser = new DefaultParser(); final CommandLine commandLine = parser.parse(COMPLETE_OPTIONS, args); final DocumentDbConnectionProperties newProperties = new DocumentDbConnectionProperties(); 
Assertions.assertTrue(tryGetConnectionProperties(commandLine, newProperties, output)); Assertions.assertEquals(properties.getHostname(), newProperties.getHostname()); Assertions.assertEquals(properties.getDatabase(), newProperties.getDatabase()); Assertions.assertEquals(properties.getUser(), newProperties.getUser()); Assertions.assertEquals(CUSTOM_SCHEMA_NAME, newProperties.getSchemaName()); Assertions.assertEquals(properties.getTlsEnabled(), newProperties.getTlsEnabled()); Assertions.assertEquals(properties.getTlsAllowInvalidHostnames(), newProperties.getTlsAllowInvalidHostnames()); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(String.format("Removed schema '%s'.", CUSTOM_SCHEMA_NAME), output.toString()); output.setLength(0); args = buildArguments("-g", CUSTOM_SCHEMA_NAME); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals( String.format("New schema '%s', version '1' generated.", CUSTOM_SCHEMA_NAME), output.toString()); // drop the table schemas to be inconsistent. 
dropCollection(testEnvironment, TABLE_SCHEMA_COLLECTION); args = buildArguments("-r", CUSTOM_SCHEMA_NAME); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(String.format("Removed schema '%s'.", CUSTOM_SCHEMA_NAME), output.toString()); } finally { dropCollection(testEnvironment, collectionName1); dropCollection(testEnvironment, collectionName2); } } @ParameterizedTest(name = "testListSchema - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testListSchema(final DocumentDbTestEnvironment testEnvironment) throws SQLException { setConnectionProperties(testEnvironment); final String collectionName1 = createSimpleCollection(testEnvironment); final String collectionName2 = createSimpleCollection(testEnvironment); try { String[] args = buildArguments("-g"); final StringBuilder output = new StringBuilder(); DocumentDbMain.handleCommandLine(args, output); Assertions.assertTrue(NEW_DEFAULT_SCHEMA_ANY_VERSION_PATTERN .matcher(output.toString()) .matches()); args = buildArguments("-g", CUSTOM_SCHEMA_NAME); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); Assertions .assertEquals(String.format("New schema '%s', version '1' generated.", CUSTOM_SCHEMA_NAME), output.toString()); args = buildArguments("-l"); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(String.format( "Name=%1$s, Version=1, SQL Name=%3$s%n" + "Name=%2$s, Version=1, SQL Name=%3$s%n", DEFAULT_SCHEMA_NAME, CUSTOM_SCHEMA_NAME, testEnvironment.getDatabaseName()), output.toString().replaceAll(", Modified=.*", "")); // Ensure listing schemas doesn't create a new schema args = buildArguments("-r", CUSTOM_SCHEMA_NAME); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(String.format("Removed schema '%s'.", CUSTOM_SCHEMA_NAME), output.toString()); args = buildArguments("-r", DEFAULT_SCHEMA_NAME); output.setLength(0); DocumentDbMain.handleCommandLine(args, 
output); Assertions.assertEquals(String.format("Removed schema '%s'.", DEFAULT_SCHEMA_NAME), output.toString()); args = buildArguments("-l", CUSTOM_SCHEMA_NAME); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(0, output.length()); } finally { dropCollection(testEnvironment, collectionName1); dropCollection(testEnvironment, collectionName2); } } @ParameterizedTest(name = "testListTables - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testListTables(final DocumentDbTestEnvironment testEnvironment) throws SQLException { setConnectionProperties(testEnvironment); final String collectionName1 = createSimpleCollection(testEnvironment); final String collectionName2 = createSimpleCollection(testEnvironment); try { String[] args = buildArguments("-g"); final StringBuilder output = new StringBuilder(); DocumentDbMain.handleCommandLine(args, output); Assertions.assertTrue(NEW_DEFAULT_SCHEMA_ANY_VERSION_PATTERN .matcher(output.toString()) .matches()); args = buildArguments("-g", CUSTOM_SCHEMA_NAME); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); Assertions .assertEquals("New schema '" + CUSTOM_SCHEMA_NAME + "', version '1' generated.", output.toString()); args = buildArguments("-b"); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); final List<String> formatArgs = Arrays .stream(new String[]{collectionName1, collectionName2}).sorted() .collect(Collectors.toList()); String actual = output.toString().replace("\r\n", "\n"); Assertions.assertEquals(String.format( "%s\n" + "%s\n", formatArgs.toArray()), actual); args = buildArguments("-b", CUSTOM_SCHEMA_NAME); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); actual = output.toString().replace("\r\n", "\n"); formatArgs.addAll(Arrays.stream(new String[]{collectionName1, collectionName2}).sorted() .collect(Collectors.toList())); Assertions.assertEquals(String.format( "%s\n" + "%s\n", formatArgs.toArray()), 
actual); // Ensure listing tables doesn't create a new schema args = buildArguments("-r", CUSTOM_SCHEMA_NAME); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(String.format("Removed schema '%s'.", CUSTOM_SCHEMA_NAME), output.toString()); args = buildArguments("-b", CUSTOM_SCHEMA_NAME); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(0, output.length()); } finally { dropCollection(testEnvironment, collectionName1); dropCollection(testEnvironment, collectionName2); } } @ParameterizedTest(name = "testListEmpty - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testListEmpty(final DocumentDbTestEnvironment testEnvironment) throws SQLException { setConnectionProperties(testEnvironment); final StringBuilder output = new StringBuilder(); final String[] args = buildArguments("-l"); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(0, output.length()); } @ParameterizedTest(name = "testExportStdOut - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testExportStdOut(final DocumentDbTestEnvironment testEnvironment) throws SQLException { setConnectionProperties(testEnvironment); final String collectionName1 = createSimpleCollection(testEnvironment); final String collectionName2 = createSimpleCollection(testEnvironment); try { String[] args = buildArguments("-g"); final StringBuilder output = new StringBuilder(); DocumentDbMain.handleCommandLine(args, output); Assertions.assertTrue(NEW_DEFAULT_SCHEMA_ANY_VERSION_PATTERN .matcher(output.toString()) .matches()); final String[] collections = Arrays .stream(new String[] { collectionName1, collectionName2}) .sorted() .collect(Collectors.toList()).toArray(new String[2]); args = buildArguments(String.format("-e=%s,%s", (Object[]) collections)); output.setLength(0); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals( getExpectedExportContent(collections), 
output.toString().replace("\r\n", "\n")); } finally { dropCollection(testEnvironment, collectionName2); dropCollection(testEnvironment, collectionName1); } } @ParameterizedTest(name = "testExportOutputFile - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testExportOutputFile(final DocumentDbTestEnvironment testEnvironment) throws SQLException, IOException { setConnectionProperties(testEnvironment); final String collectionName = createSimpleCollection(testEnvironment); try { String[] args = buildArguments("-g", DEFAULT_SCHEMA_NAME); final StringBuilder output = new StringBuilder(); DocumentDbMain.handleCommandLine(args, output); Assertions.assertTrue(NEW_DEFAULT_SCHEMA_ANY_VERSION_PATTERN .matcher(output.toString()) .matches()); final String outputFileName = collectionName + " tableSchema.json"; final Path outputFilePath = USER_HOME_PATH.resolve(outputFileName); output.setLength(0); args = buildArguments("-e=" + collectionName, DEFAULT_SCHEMA_NAME, outputFileName); try { DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals("", output.toString().replace("\r\n", "\n")); readOutputFileContent(outputFilePath, output); Assertions.assertEquals( getExpectedExportContent(collectionName), output.toString().replace("\r\n", "\n")); } finally { Assertions.assertTrue(outputFilePath.toFile().delete()); } } finally { dropCollection(testEnvironment, collectionName); } } @ParameterizedTest(name = "testImportFile - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testImportFile(final DocumentDbTestEnvironment testEnvironment) throws SQLException, IOException { setConnectionProperties(testEnvironment); final String collectionName = createSimpleCollection(testEnvironment); try { String[] args = buildArguments("-g", CUSTOM_SCHEMA_NAME); final StringBuilder output = new StringBuilder(); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(String.format("New schema '%s', version '1' generated.", 
CUSTOM_SCHEMA_NAME), output.toString()); final String outputFileName = collectionName + "_tableSchema.json"; final Path outputFilePath = USER_HOME_PATH.resolve(outputFileName); output.setLength(0); args = buildArguments("-e=" + collectionName, CUSTOM_SCHEMA_NAME, outputFileName); try { DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals("", output.toString().replace("\r\n", "\n")); readOutputFileContent(outputFilePath, output); Assertions.assertEquals( getExpectedExportContent(collectionName), output.toString().replace("\r\n", "\n")); output.setLength(0); args = buildArguments("-r", CUSTOM_SCHEMA_NAME); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(String.format("Removed schema '%s'.", CUSTOM_SCHEMA_NAME), output.toString()); output.setLength(0); args = buildArguments("-b", CUSTOM_SCHEMA_NAME); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(0, output.length()); output.setLength(0); args = buildArguments("-i=" + outputFileName, CUSTOM_SCHEMA_NAME); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals("", output.toString()); output.setLength(0); args = buildArguments("-b", CUSTOM_SCHEMA_NAME); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals(collectionName, output.toString().trim()); } finally { Assertions.assertTrue(outputFilePath.toFile().delete()); } } finally { dropCollection(testEnvironment, collectionName); } } @ParameterizedTest(name = "testImportFileDuplicateColumn - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testImportFileDuplicateColumn(final DocumentDbTestEnvironment testEnvironment) throws SQLException, IOException { setConnectionProperties(testEnvironment); final String collectionName = createSimpleCollection(testEnvironment); try { String[] args = buildArguments("-g", DEFAULT_SCHEMA_NAME); final StringBuilder output = new StringBuilder(); DocumentDbMain.handleCommandLine(args, output); 
Assertions.assertTrue(NEW_DEFAULT_SCHEMA_ANY_VERSION_PATTERN .matcher(output.toString()) .matches()); final String outputFileName = collectionName + " tableSchema.json"; final Path outputFilePath = USER_HOME_PATH.resolve(outputFileName); output.setLength(0); args = buildArguments("-e=" + collectionName, DEFAULT_SCHEMA_NAME, outputFileName); try { DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals("", output.toString().replace("\r\n", "\n")); readOutputFileContent(outputFilePath, output); Assertions.assertEquals( getExpectedExportContent(collectionName), output.toString().replace("\r\n", "\n")); final String outputWithDuplicateColumnName = getExpectedExportContent( collectionName) .replace("\"sqlName\" : \"fieldDouble\"", "\"sqlName\" : \"fieldString\""); try (BufferedWriter bufferedWriter = Files .newBufferedWriter(outputFilePath, StandardCharsets.UTF_8)) { bufferedWriter.write(outputWithDuplicateColumnName); } output.setLength(0); args = buildArguments("-i=" + outputFileName, DEFAULT_SCHEMA_NAME); DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals( String.format("Duplicate column key 'fieldString' detected for" + " table schema '%s'." + " Original column 'DocumentDbSchemaColumn{fieldPath='fieldDouble'," + " sqlName='fieldString', sqlType=DOUBLE, dbType=DOUBLE, index=false," + " primaryKey=false, foreignKeyTableName='null', foreignKeyColumnName='null'}'." 
+ " Duplicate column 'DocumentDbSchemaColumn{fieldPath='fieldString'," + " sqlName='fieldString', sqlType=VARCHAR, dbType=STRING, index=false," + " primaryKey=false, foreignKeyTableName='null', foreignKeyColumnName='null'}'.", collectionName), output.toString()); } finally { Assertions.assertTrue(outputFilePath.toFile().delete()); } } finally { dropCollection(testEnvironment, collectionName); } } @ParameterizedTest(name = "testImportUnauthorizedError - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testImportUnauthorizedError(final DocumentDbTestEnvironment testEnvironment) throws SQLException, IOException { setConnectionProperties(testEnvironment); final String collectionName = createSimpleCollection(testEnvironment); try { String[] args = buildArguments("-g", DEFAULT_SCHEMA_NAME); final StringBuilder output = new StringBuilder(); DocumentDbMain.handleCommandLine(args, output); Assertions.assertTrue(NEW_DEFAULT_SCHEMA_ANY_VERSION_PATTERN .matcher(output.toString()) .matches()); final String outputFileName = collectionName + " tableSchema.json"; final Path outputFilePath = USER_HOME_PATH.resolve(outputFileName); output.setLength(0); args = buildArguments("-e=" + collectionName, DEFAULT_SCHEMA_NAME, outputFileName); try { DocumentDbMain.handleCommandLine(args, output); Assertions.assertEquals("", output.toString().replace("\r\n", "\n")); readOutputFileContent(outputFilePath, output); Assertions.assertEquals( getExpectedExportContent(collectionName), output.toString().replace("\r\n", "\n")); output.setLength(0); args = buildArguments("-i=" + outputFileName, DEFAULT_SCHEMA_NAME, null, DocumentDbConnectionProperties .getPropertiesFromConnectionString( testEnvironment.getRestrictedUserConnectionString())); DocumentDbMain.handleCommandLine(args, output); Assertions.assertTrue(output.toString().contains("Command failed with error 13")); } finally { Assertions.assertTrue(outputFilePath.toFile().delete()); } } finally { dropCollection(testEnvironment, 
collectionName); } }

/**
 * Verifies that exporting (-e) a table name that is not in the generated schema
 * reports the unrecognized name and then lists the available table names.
 */
@ParameterizedTest(name = "testExportInvalidTable - [{index}] - {arguments}")
@MethodSource("getTestEnvironments")
void testExportInvalidTable(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
    setConnectionProperties(testEnvironment);
    final String collectionName = createSimpleCollection(testEnvironment);
    try {
        // First generate a schema (-g) so the export has a schema to search.
        String[] args = buildArguments("-g");
        final StringBuilder output = new StringBuilder();
        DocumentDbMain.handleCommandLine(args, output);
        Assertions.assertTrue(NEW_DEFAULT_SCHEMA_ANY_VERSION_PATTERN
                .matcher(output.toString())
                .matches());
        // A random UUID cannot collide with any generated table name.
        final String invalidTableName = UUID.randomUUID().toString();
        args = buildArguments("-e=" + invalidTableName);
        output.setLength(0);
        DocumentDbMain.handleCommandLine(args, output);
        // Line endings are normalized so the check passes on Windows and Unix alike.
        Assertions.assertTrue(output.toString().replace("\r\n", "\n").startsWith(
                "Requested table name(s) are not recognized in schema: " + invalidTableName + "\n"
                        + "Available table names: "));
    } finally {
        dropCollection(testEnvironment, collectionName);
    }
}

/** Verifies that an unknown option (-w) is rejected with an "Unrecognized option" message. */
@DisplayName("Tests it detects an \"Unrecognized\" option")
@Test
void testUnrecognizedOption() throws SQLException {
    final StringBuilder output = new StringBuilder();
    // -w is not a declared option; the remaining options are otherwise valid.
    DocumentDbMain.handleCommandLine(
            new String[] {"-w", "-g", "-s=localhost", "-d=test", "-u=testuser", "-p=password"},
            output);
    Assertions.assertEquals("Unrecognized option: -w", output.toString());
}

/**
 * Verifies the complete usage text written for --help. The expected string must stay in
 * sync with the options declared by DocumentDbMain; line endings are normalized before
 * comparison.
 */
@DisplayName("Tests the help (--help) option")
@Test
void testHelpOption() throws SQLException {
    final StringBuilder output = new StringBuilder();
    DocumentDbMain.handleCommandLine(new String[] {"--help"}, output);
    Assertions.assertEquals(
            "usage: " + DocumentDbMain.getLibraryName() + " [-g | -r | -l | -b | -e <[table-name[,...]]> |\n"
                    + " -i <file-name>] -s <host-name> -d\n"
                    + " <database-name> -u <user-name> [-p <password>]\n"
                    + " [-n <schema-name>] [-m <method>] [-x\n"
                    + " <max-documents>] [-t] [-a] [-o <file-name>]\n"
                    + " [-h] [--version]\n"
                    + " -a,--tls-allow-invalid-hostnames The indicator of whether to allow invalid\n"
                    + " hostnames when connecting to DocumentDB.\n"
                    + " Default: false.\n"
                    + " -b,--list-tables Lists the SQL table names in a schema.\n"
                    + " -d,--database <database-name> The name of the database for the schema\n"
                    + " operations. Required.\n"
                    + " -e,--export <[table-name[,...]]> Exports the schema to for SQL tables named\n"
                    + " [<table-name>[,<table-name>[…]]]. If no\n"
                    + " <table-name> are given, all table schema will\n"
                    + " be exported. By default, the schema is\n"
                    + " written to stdout. Use the --output option to\n"
                    + " write to a file. The output format is JSON.\n"
                    + " -g,--generate-new Generates a new schema for the database. This\n"
                    + " will have the effect of replacing an existing\n"
                    + " schema of the same name, if it exists.\n"
                    + " -h,--help Prints the command line syntax.\n"
                    + " -i,--import <file-name> Imports the schema from <file-name> in your\n"
                    + " home directory. The schema will be imported\n"
                    + " using the <schema-name> and a new version\n"
                    + " will be added - replacing the existing\n"
                    + " schema. The expected input format is JSON.\n"
                    + " -l,--list-schema Lists the schema names, version and table\n"
                    + " names available in the schema repository.\n"
                    + " -m,--scan-method <method> The scan method to sample documents from the\n"
                    + " collections. One of: random, idForward,\n"
                    + " idReverse, or all. Used in conjunction with\n"
                    + " the --generate-new command. Default: random.\n"
                    + " -n,--schema-name <schema-name> The name of the schema. Default: _default.\n"
                    + " -o,--output <file-name> Write the exported schema to <file-name> in\n"
                    + " your home directory (instead of stdout). This\n"
                    + " will overwrite any existing file with the\n"
                    + " same name\n"
                    + " -p,--password <password> The password for the user performing the\n"
                    + " schema operations. Optional. If this option\n"
                    + " is not provided, the end-user will be\n"
                    + " prompted to enter the password directly.\n"
                    + " -r,--remove Removes the schema from storage for schema\n"
                    + " given by -m <schema-name>, or for schema\n"
                    + " '_default', if not provided.\n"
                    + " -s,--server <host-name> The hostname and optional port number\n"
                    + " (default: 27017) in the format\n"
                    + " hostname[:port]. Required.\n"
                    + " -t,--tls The indicator of whether to use TLS\n"
                    + " encryption when connecting to DocumentDB.\n"
                    + " Default: false.\n"
                    + " -u,--user <user-name> The name of the user performing the schema\n"
                    + " operations. Required. Note: the user will\n"
                    + " require readWrite role on the <database-name>\n"
                    + " where the schema are stored if creating or\n"
                    + " modifying schema.\n"
                    + " --version Prints the version number of the command.\n"
                    + " -x,--scan-limit <max-documents> The maximum number of documents to sample in\n"
                    + " each collection. Used in conjunction with the\n"
                    + " --generate-new command. Default: 1000.\n",
            output.toString().replace("\r\n", "\n"));
}

/** Verifies the --version output format: "&lt;library&gt;: version &lt;version&gt;". */
@DisplayName("Tests the version (--version) option")
@Test
void testVersionOption() throws SQLException {
    final StringBuilder output = new StringBuilder();
    DocumentDbMain.handleCommandLine(new String[] {"--version"}, output);
    Assertions.assertEquals(String.format(
            "%s: version %s", DocumentDbMain.LIBRARY_NAME, DocumentDbMain.ARCHIVE_VERSION),
            output.toString());
}

/**
 * Verifies that exporting with an --output target that is an existing directory fails
 * with a clear error message. (This method continues past this chunk boundary.)
 */
@ParameterizedTest(name = "testExportFileToDirectoryError - [{index}] - {arguments}")
@MethodSource("getTestEnvironments")
void testExportFileToDirectoryError(final DocumentDbTestEnvironment testEnvironment)
        throws SQLException, IOException {
    setConnectionProperties(testEnvironment);
    final String collectionName = createSimpleCollection(testEnvironment);
    try {
        // Create a real directory in the user's home folder to use as the invalid target.
        final String directoryName = UUID.randomUUID().toString().replace("-", "");
        final Path directoryPath = USER_HOME_PATH.resolve(directoryName);
        Files.createDirectory(directoryPath);
        try {
            final StringBuilder output
= new StringBuilder();
            final String[] args = buildArguments("-e=" + collectionName, DEFAULT_SCHEMA_NAME, directoryName);
            DocumentDbMain.handleCommandLine(args, output);
            Assertions.assertEquals("Output file name must not be a directory.", output.toString());
        } finally {
            // Clean up the directory created for this test.
            Assertions.assertTrue(directoryPath.toFile().delete());
        }
    } finally {
        dropCollection(testEnvironment, collectionName);
    }
}

/**
 * Creates a new, uniquely named collection populated with simple consistent data.
 *
 * @param testEnvironment the environment that hosts the collection.
 * @return the name of the new collection.
 * @throws SQLException if creating or populating the collection fails.
 */
private String createSimpleCollection(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
    final String collectionName;
    collectionName = testEnvironment.newCollectionName(false);
    createSimpleCollection(testEnvironment, collectionName);
    return collectionName;
}

/**
 * Verifies that importing (-i) from a file that does not exist in the user's home folder
 * reports a "not found" error.
 */
// NOTE(review): the display name below says "testExportFileToDirectoryError" — looks like a
// copy/paste slip; it should presumably read "testImportFileNotExistsError". Confirm and fix.
@ParameterizedTest(name = "testExportFileToDirectoryError - [{index}] - {arguments}")
@MethodSource("getTestEnvironments")
void testImportFileNotExistsError(final DocumentDbTestEnvironment testEnvironment)
        throws SQLException {
    setConnectionProperties(testEnvironment);
    final String collectionName = createSimpleCollection(testEnvironment);
    try {
        final StringBuilder output = new StringBuilder();
        // The random collection name doubles as a file name guaranteed not to exist.
        final String[] args = buildArguments("-i=" + collectionName, DEFAULT_SCHEMA_NAME);
        DocumentDbMain.handleCommandLine(args, output);
        Assertions.assertEquals(
                String.format("Import file '%s' not found in your user's home folder.",
                        collectionName),
                output.toString());
    } finally {
        dropCollection(testEnvironment, collectionName);
    }
}

/**
 * Reads the whole of the given file into {@code output}, joining lines with the platform
 * line separator (no trailing separator).
 */
private void readOutputFileContent(
        final Path outputFilePath, final StringBuilder output) throws IOException {
    try (BufferedReader reader = Files
            .newBufferedReader(outputFilePath, StandardCharsets.UTF_8)) {
        String line = reader.readLine();
        boolean isFirst = true;
        while (line != null) {
            // Separator goes between lines only, never after the last one.
            if (!isFirst) {
                output.append(System.lineSeparator());
            }
            isFirst = false;
            output.append(line);
            line = reader.readLine();
        }
    }
}

/** Caches the connection properties parsed from the environment's JDBC connection string. */
private void setConnectionProperties(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
    this.properties = DocumentDbConnectionProperties
            .getPropertiesFromConnectionString(testEnvironment.getJdbcConnectionString());
}

/** Builds the argument array for {@code command} using the cached connection properties. */
private String[] buildArguments(final String command) {
    return buildArguments(command, null);
}

/** Builds the argument array for {@code command} with an optional schema name. */
private String[] buildArguments(final String command, final String schemaName) {
    return buildArguments(command, schemaName, null, this.properties);
}

/** Builds the argument array with an optional schema name and output file name. */
private String[] buildArguments(final String command, final String schemaName, final String outputFileName) {
    return buildArguments(command, schemaName, outputFileName, this.properties);
}

/**
 * Builds the full command-line argument array: the command itself, the
 * server/database/user/password options, TLS flags as configured on {@code properties},
 * and the optional schema name (-n) and output file (-o).
 */
private String[] buildArguments(final String command, final String schemaName, final String outputFileName,
        final DocumentDbConnectionProperties properties) {
    final List<String> argsList = new ArrayList<>();
    argsList.add(command);
    argsList.add(String.format("-s=%s", properties.getHostname()));
    argsList.add(String.format("-d=%s", properties.getDatabase()));
    argsList.add(String.format("-u=%s", properties.getUser()));
    argsList.add(String.format("-p=%s", properties.getPassword()));
    if (properties.getTlsEnabled()) {
        argsList.add("-t");
    }
    if (properties.getTlsAllowInvalidHostnames()) {
        argsList.add("-a");
    }
    if (!isNullOrEmpty(schemaName)) {
        argsList.add("-n");
        argsList.add(schemaName);
    }
    if (!isNullOrEmpty(outputFileName)) {
        argsList.add("-o");
        argsList.add(outputFileName);
    }
    return argsList.toArray(new String[0]);
}

/** Populates {@code collectionName} in the given environment with five rows of simple consistent data. */
private void createSimpleCollection(
        final DocumentDbTestEnvironment testEnvironment,
        final String collectionName) throws SQLException {
    try (MongoClient client = testEnvironment.createMongoClient()) {
        final MongoDatabase database = client.getDatabase(testEnvironment.getDatabaseName());
        final MongoCollection<BsonDocument> collection = database
                .getCollection(collectionName, BsonDocument.class);
        testEnvironment.prepareSimpleConsistentData(collection, 5);
    }
}

/** Drops the named collection. (This method continues past this chunk boundary.) */
private void dropCollection(
        final DocumentDbTestEnvironment testEnvironment,
        final String collectionName) throws SQLException {
    try (MongoClient client = testEnvironment.createMongoClient()) {
final MongoDatabase database = client.getDatabase(testEnvironment.getDatabaseName());
        final MongoCollection<BsonDocument> collection = database
                .getCollection(collectionName, BsonDocument.class);
        collection.drop();
    }
}

/**
 * Builds the JSON document expected from a schema export of the given collections: one
 * entry per collection, with the column metadata produced for the "simple consistent"
 * sample data. Returns an empty string when no collection names are supplied.
 */
private static String getExpectedExportContent(
        final String... collectionNames) {
    if (collectionNames == null || collectionNames.length < 1) {
        return "";
    }
    final StringBuilder builder = new StringBuilder();
    builder.append("[ ");
    boolean isFirst = true;
    for (String collectionName : collectionNames) {
        if (!isFirst) {
            builder.append(", ");
        }
        isFirst = false;
        // The _id column is exported as "<collection>__id" and marked as the primary key.
        builder.append("{\n" + " \"sqlName\" : \"").append(collectionName).append("\",\n")
                .append(" \"collectionName\" : \"").append(collectionName).append("\",\n")
                .append(" \"columns\" : [ {\n").append(" \"fieldPath\" : \"_id\",\n")
                .append(" \"sqlName\" : \"").append(collectionName).append("__id\",\n")
                .append(" \"sqlType\" : \"varchar\",\n")
                .append(" \"dbType\" : \"object_id\",\n")
                .append(" \"isPrimaryKey\" : true\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldDouble\",\n")
                .append(" \"sqlName\" : \"fieldDouble\",\n")
                .append(" \"sqlType\" : \"double\",\n")
                .append(" \"dbType\" : \"double\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldString\",\n")
                .append(" \"sqlName\" : \"fieldString\",\n")
                .append(" \"sqlType\" : \"varchar\",\n")
                .append(" \"dbType\" : \"string\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldObjectId\",\n")
                .append(" \"sqlName\" : \"fieldObjectId\",\n")
                .append(" \"sqlType\" : \"varchar\",\n")
                .append(" \"dbType\" : \"object_id\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldBoolean\",\n")
                .append(" \"sqlName\" : \"fieldBoolean\",\n")
                .append(" \"sqlType\" : \"boolean\",\n")
                .append(" \"dbType\" : \"boolean\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldDate\",\n")
                .append(" \"sqlName\" : \"fieldDate\",\n")
                .append(" \"sqlType\" : \"timestamp\",\n")
                .append(" \"dbType\" : \"date_time\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldInt\",\n")
                .append(" \"sqlName\" : \"fieldInt\",\n")
                .append(" \"sqlType\" : \"integer\",\n")
                .append(" \"dbType\" : \"int32\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldLong\",\n")
                .append(" \"sqlName\" : \"fieldLong\",\n")
                .append(" \"sqlType\" : \"bigint\",\n")
                .append(" \"dbType\" : \"int64\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldMaxKey\",\n")
                .append(" \"sqlName\" : \"fieldMaxKey\",\n")
                .append(" \"sqlType\" : \"varchar\",\n")
                .append(" \"dbType\" : \"max_key\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldMinKey\",\n")
                .append(" \"sqlName\" : \"fieldMinKey\",\n")
                .append(" \"sqlType\" : \"varchar\",\n")
                .append(" \"dbType\" : \"min_key\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldNull\",\n")
                .append(" \"sqlName\" : \"fieldNull\",\n")
                .append(" \"sqlType\" : \"null\",\n")
                .append(" \"dbType\" : \"null\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldBinary\",\n")
                .append(" \"sqlName\" : \"fieldBinary\",\n")
                .append(" \"sqlType\" : \"varbinary\",\n")
                .append(" \"dbType\" : \"binary\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldDecimal128\",\n")
                .append(" \"sqlName\" : \"fieldDecimal128\",\n")
                .append(" \"sqlType\" : \"decimal\",\n")
                .append(" \"dbType\" : \"decimal128\"\n").append(" }, {\n")
                .append(" \"fieldPath\" : \"fieldTimestamp\",\n")
                .append(" \"sqlName\" : \"fieldTimestamp\",\n")
                .append(" \"sqlType\" : \"timestamp\",\n")
                .append(" \"dbType\" : \"timestamp\"\n")
                .append(" } ]\n")
                .append("}");
    }
    builder.append(" ]");
    return builder.toString();
}
}
4,500
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/DocumentDbResultSetMetaDataTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.result.InsertOneResult;
import org.bson.BsonArray;
import org.bson.BsonDocument;
import org.bson.BsonInt32;
import org.bson.BsonObjectId;
import org.bson.BsonString;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension;
import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleTest;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchema;
import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Properties;

/**
 * Tests {@link ResultSet#getMetaData()} behavior against a local FlapDoodle-backed
 * MongoDB instance, for both flat ("simple") and nested/array ("complex") collections.
 */
@ExtendWith(DocumentDbFlapDoodleExtension.class)
public class DocumentDbResultSetMetaDataTest extends DocumentDbFlapDoodleTest {
    private static final String USERNAME = "user";
    private static final String PASSWORD = "password";
    private static final String DATABASE = "testDb";
    private static final String HOSTNAME = "localhost";
    private static final String COLLECTION_SIMPLE = "COLLECTION_SIMPLE";
    private static final String COLLECTION_COMPLEX = "COLLECTION_COMPLEX";
    private static final String CONNECTION_STRING_TEMPLATE = "jdbc:documentdb://%s:%s@%s:%s/%s?tls=false";

    /** Initializes the test class: creates a valid user and seeds both collections. */
    @BeforeAll
    void initialize() {
        // Add 1 valid user so we can successfully authenticate.
        createUser(DATABASE, USERNAME, PASSWORD);
        prepareSimpleConsistentData(DATABASE, COLLECTION_SIMPLE, 5, USERNAME, PASSWORD);
        addComplexData();
    }

    /** Removes the default schema after each test so every test regenerates it. */
    @AfterEach
    void afterEach() throws Exception {
        final DocumentDbConnectionProperties properties = DocumentDbConnectionProperties
                .getPropertiesFromConnectionString(new Properties(), getJdbcConnectionString(), "jdbc:documentdb:");
        try (DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, null)) {
            schemaWriter.remove(DocumentDbSchema.DEFAULT_SCHEMA_NAME);
        }
    }

    /**
     * Tests resultSet.getMetadata for a flat collection: table/schema names, the
     * generated primary-key column, and the mapped SQL type of every column.
     *
     * @throws SQLException if connection fails.
     */
    @Test
    @DisplayName("Tests metadata of a database with simple data.")
    void testGetResultSetMetadataSimple() throws SQLException {
        final String connectionString = getJdbcConnectionString();
        try (final Connection connection = DriverManager.getConnection(connectionString);
                final DocumentDbStatement statement = (DocumentDbStatement) connection.createStatement();
                final ResultSet resultSet = statement.executeQuery(String.format("SELECT * FROM \"%s\"", COLLECTION_SIMPLE))) {
            final ResultSetMetaData metadata = resultSet.getMetaData();
            Assertions.assertEquals(13, metadata.getColumnCount());
            Assertions.assertEquals(COLLECTION_SIMPLE, metadata.getTableName(1));
            Assertions.assertNull(metadata.getCatalogName(1));
            Assertions.assertEquals(DATABASE, metadata.getSchemaName(1));
            // Column 1 is the generated "<collection>__id" primary-key column.
            Assertions.assertEquals(COLLECTION_SIMPLE + "__id", metadata.getColumnName(1));
            Assertions.assertEquals(COLLECTION_SIMPLE + "__id", metadata.getColumnLabel(1));
            Assertions.assertEquals("VARCHAR", metadata.getColumnTypeName(1));
            Assertions.assertEquals("java.lang.String", metadata.getColumnClassName(1));
            Assertions.assertEquals(Types.VARCHAR, metadata.getColumnType(1));
            Assertions.assertEquals(0, metadata.isNullable(1));
            Assertions.assertEquals(65536, metadata.getPrecision(1));
            Assertions.assertEquals(65536, metadata.getColumnDisplaySize(1));
            Assertions.assertTrue(metadata.isReadOnly(1));
            Assertions.assertTrue(metadata.isSigned(1));
            Assertions.assertTrue(metadata.isCaseSensitive(1));
            Assertions.assertFalse(metadata.isSearchable(1));
            Assertions.assertFalse(metadata.isWritable(1));
            Assertions.assertFalse(metadata.isAutoIncrement(1));
            Assertions.assertFalse(metadata.isCurrency(1));
            Assertions.assertFalse(metadata.isDefinitelyWritable(1));
            // Remaining columns: name and mapped SQL type per sampled BSON type.
            Assertions.assertEquals("fieldDouble", metadata.getColumnName(2));
            Assertions.assertEquals("DOUBLE", metadata.getColumnTypeName(2));
            Assertions.assertEquals(1, metadata.isNullable(2));
            Assertions.assertEquals(0, metadata.getScale(2));
            Assertions.assertEquals("fieldString", metadata.getColumnName(3));
            Assertions.assertEquals("VARCHAR", metadata.getColumnTypeName(3));
            Assertions.assertEquals("fieldObjectId", metadata.getColumnName(4));
            Assertions.assertEquals("VARCHAR", metadata.getColumnTypeName(4));
            Assertions.assertEquals("fieldBoolean", metadata.getColumnName(5));
            Assertions.assertEquals("BOOLEAN", metadata.getColumnTypeName(5));
            Assertions.assertEquals("fieldDate", metadata.getColumnName(6));
            Assertions.assertEquals("TIMESTAMP", metadata.getColumnTypeName(6));
            Assertions.assertEquals("fieldInt", metadata.getColumnName(7));
            Assertions.assertEquals("INTEGER", metadata.getColumnTypeName(7));
            Assertions.assertEquals("fieldLong", metadata.getColumnName(8));
            Assertions.assertEquals("BIGINT", metadata.getColumnTypeName(8));
            Assertions.assertEquals("fieldMaxKey", metadata.getColumnName(9));
            Assertions.assertEquals("VARCHAR", metadata.getColumnTypeName(9));
            Assertions.assertEquals("fieldMinKey", metadata.getColumnName(10));
            Assertions.assertEquals("VARCHAR", metadata.getColumnTypeName(10));
            Assertions.assertEquals("fieldNull", metadata.getColumnName(11));
            Assertions.assertEquals("VARCHAR", metadata.getColumnTypeName(11));
            Assertions.assertEquals("fieldBinary", metadata.getColumnName(12));
            Assertions.assertEquals("VARBINARY", metadata.getColumnTypeName(12));
            Assertions.assertEquals("fieldDecimal128", metadata.getColumnName(13));
            Assertions.assertEquals("DECIMAL", metadata.getColumnTypeName(13));
        }
    }

    /**
     * Tests metadata for the virtual tables produced from nested documents (two levels
     * deep) and from an array field.
     */
    @Test
    @DisplayName("Tests metadata of a database with nested documents and an array.")
    void testResultSetGetMetadataComplex() throws SQLException {
        final String connectionString = getJdbcConnectionString();
        try (final Connection connection = DriverManager.getConnection(connectionString);
                final DocumentDbStatement statement = (DocumentDbStatement) connection.createStatement();
                final ResultSet outerTableResultSet = statement.executeQuery(String.format("SELECT * FROM \"%s\"", COLLECTION_COMPLEX));
                final ResultSet levelOneNestedTable = statement.executeQuery(
                        String.format("SELECT * FROM \"%s\"", COLLECTION_COMPLEX + "_innerDocument"));
                final ResultSet levelOneNestedTableTwo = statement.executeQuery(
                        String.format("SELECT * FROM \"%s\"", COLLECTION_COMPLEX + "_innerDocumentTwo"));
                final ResultSet levelTwoNestedTable = statement.executeQuery(
                        String.format(
                                "SELECT * FROM \"%s\"", COLLECTION_COMPLEX + "_innerDocument_levelTwoDocument"));
                final ResultSet arrayTable = statement.executeQuery(
                        String.format("SELECT * FROM \"%s\"", COLLECTION_COMPLEX + "_array"))) {
            // Outer table: primary key plus the scalar "count" field.
            Assertions.assertNotNull(outerTableResultSet);
            final ResultSetMetaData outerMetadata = outerTableResultSet.getMetaData();
            Assertions.assertEquals(2, outerMetadata.getColumnCount());
            Assertions.assertEquals(COLLECTION_COMPLEX + "__id", outerMetadata.getColumnName(1));
            Assertions.assertEquals("count", outerMetadata.getColumnName(2));
            Assertions.assertEquals("VARCHAR", outerMetadata.getColumnTypeName(1));
            Assertions.assertEquals("INTEGER", outerMetadata.getColumnTypeName(2));
            // First level-one nested document table.
            Assertions.assertNotNull(levelOneNestedTable);
            final ResultSetMetaData innerMetadata = levelOneNestedTable.getMetaData();
            Assertions.assertEquals(2, innerMetadata.getColumnCount());
            Assertions.assertEquals(COLLECTION_COMPLEX + "__id", innerMetadata.getColumnName(1));
            Assertions.assertEquals("levelOneString", innerMetadata.getColumnName(2));
            Assertions.assertEquals("VARCHAR", innerMetadata.getColumnTypeName(1));
            Assertions.assertEquals("VARCHAR", innerMetadata.getColumnTypeName(2));
            // Second level-one nested document table.
            Assertions.assertNotNull(levelOneNestedTableTwo);
            final ResultSetMetaData innerMetadataTwo = levelOneNestedTableTwo.getMetaData();
            Assertions.assertEquals(2, innerMetadataTwo.getColumnCount());
            // Fixed: previously asserted against innerMetadata (copy/paste), so the
            // second nested table's key column was never actually verified.
            Assertions.assertEquals(COLLECTION_COMPLEX + "__id", innerMetadataTwo.getColumnName(1));
            Assertions.assertEquals("levelOneInt", innerMetadataTwo.getColumnName(2));
            Assertions.assertEquals("VARCHAR", innerMetadataTwo.getColumnTypeName(1));
            Assertions.assertEquals("INTEGER", innerMetadataTwo.getColumnTypeName(2));
            // Level-two nested document table.
            Assertions.assertNotNull(levelTwoNestedTable);
            final ResultSetMetaData levelTwoMetadata = levelTwoNestedTable.getMetaData();
            Assertions.assertEquals(3, levelTwoMetadata.getColumnCount());
            Assertions.assertEquals(COLLECTION_COMPLEX + "__id", levelTwoMetadata.getColumnName(1));
            Assertions.assertEquals("levelTwoInt", levelTwoMetadata.getColumnName(2));
            Assertions.assertEquals("levelTwoField", levelTwoMetadata.getColumnName(3));
            Assertions.assertEquals("VARCHAR", levelTwoMetadata.getColumnTypeName(1));
            Assertions.assertEquals("INTEGER", levelTwoMetadata.getColumnTypeName(2));
            Assertions.assertEquals("VARCHAR", levelTwoMetadata.getColumnTypeName(3));
            // Array table: key, generated index column, and the element value.
            Assertions.assertNotNull(arrayTable);
            final ResultSetMetaData arrayMetadata = arrayTable.getMetaData();
            Assertions.assertEquals(3, arrayMetadata.getColumnCount());
            Assertions.assertEquals(COLLECTION_COMPLEX + "__id", arrayMetadata.getColumnName(1));
            Assertions.assertEquals("array_index_lvl_0", arrayMetadata.getColumnName(2));
            Assertions.assertEquals("value", arrayMetadata.getColumnName(3));
            Assertions.assertEquals("VARCHAR", arrayMetadata.getColumnTypeName(1));
            Assertions.assertEquals("BIGINT", arrayMetadata.getColumnTypeName(2));
            Assertions.assertEquals("INTEGER", arrayMetadata.getColumnTypeName(3));
        }
    }

    /** Tests that out-of-range column indices raise SQLException with the expected message. */
    @Test
    @DisplayName("Tests attempting to retrieve metadata with invalid indices.")
    void testResultSetGetMetadataInvalidIndices() throws SQLException {
        final String connectionString = getJdbcConnectionString();
        try (final Connection connection = DriverManager.getConnection(connectionString);
                final DocumentDbStatement statement = (DocumentDbStatement) connection.createStatement();
                final ResultSet resultSet = statement.executeQuery(String.format("SELECT * FROM \"%s\"", COLLECTION_SIMPLE))) {
            final ResultSetMetaData metadata = resultSet.getMetaData();
            Assertions.assertEquals(13, metadata.getColumnCount());
            // Attempt to get 0th column.
            Assertions.assertEquals(
                    SqlError.lookup(SqlError.INVALID_INDEX, 0, 13),
                    Assertions.assertThrows(SQLException.class, () -> metadata.getColumnName(0))
                            .getMessage());
            // Attempt to get 14th column.
            Assertions.assertEquals(
                    SqlError.lookup(SqlError.INVALID_INDEX, 14, 13),
                    Assertions.assertThrows(SQLException.class, () -> metadata.getColumnName(14))
                            .getMessage());
        }
    }

    /** Builds the JDBC connection string for the local FlapDoodle instance. */
    private String getJdbcConnectionString() {
        return String.format(
                CONNECTION_STRING_TEMPLATE, USERNAME, PASSWORD, HOSTNAME, getMongoPort(), DATABASE);
    }

    /**
     * Adds data with second level nested documents as well as an array.
     * The client is closed when done (fixed: previously leaked the MongoClient).
     */
    private void addComplexData() {
        try (MongoClient client = createMongoClient("admin", USERNAME, PASSWORD)) {
            final MongoDatabase database = client.getDatabase(DATABASE);
            final MongoCollection<BsonDocument> collection = database.getCollection(COLLECTION_COMPLEX, BsonDocument.class);
            for (int count = 0; count < 5; count++) {
                final BsonDocument levelTwoDocument = new BsonDocument()
                        .append("levelTwoInt", new BsonInt32(2))
                        .append("levelTwoField", new BsonString("string"));
                final BsonDocument innerDocument = new BsonDocument()
                        .append("levelOneString", new BsonString("levelOne"))
                        .append("levelTwoDocument", levelTwoDocument);
                final BsonDocument innerDocumentTwo = new BsonDocument()
                        .append("levelOneInt", new BsonInt32(2));
                final BsonArray array = new BsonArray();
                array.add(new BsonInt32(3));
                array.add(new BsonInt32(4));
                final BsonDocument outerDocument = new BsonDocument()
                        .append("_id", new BsonObjectId())
                        .append("count", new BsonInt32(count))
                        .append("innerDocument", innerDocument)
                        .append("innerDocumentTwo", innerDocumentTwo)
                        .append("array", array);
                final InsertOneResult result = collection.insertOne(outerDocument);
                Assertions.assertEquals(count + 1, collection.countDocuments());
                Assertions.assertEquals(outerDocument.getObjectId("_id"), result.getInsertedId());
            }
        }
    }
}
4,501
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/DocumentDbStatementTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.BsonDocument;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.params.provider.Arguments;
import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironment;
import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironmentFactory;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchema;
import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.stream.Stream;
import static software.amazon.documentdb.jdbc.DocumentDbMetadataScanMethod.RANDOM;

/**
 * Base class for statement tests: manages the configured test environments and
 * provides helpers to open connections, create statements, and seed data.
 */
class DocumentDbStatementTest {

    private DocumentDbTestEnvironment testEnvironment;

    /** Starts every configured test environment once, before any test runs. */
    @BeforeAll
    static void setup() throws Exception {
        for (DocumentDbTestEnvironment environment
                : DocumentDbTestEnvironmentFactory.getConfiguredEnvironments()) {
            environment.start();
        }
    }

    /** Removes the default schema after each test so later tests start from a clean slate. */
    @AfterEach
    void afterEach() throws Exception {
        final DocumentDbConnectionProperties connectionProperties =
                DocumentDbConnectionProperties.getPropertiesFromConnectionString(
                        testEnvironment.getJdbcConnectionString());
        try (DocumentDbSchemaWriter writer = new DocumentDbSchemaWriter(connectionProperties, null)) {
            writer.remove(DocumentDbSchema.DEFAULT_SCHEMA_NAME);
        }
    }

    /** Stops every configured test environment after all tests have completed. */
    @AfterAll
    static void teardown() throws Exception {
        for (DocumentDbTestEnvironment environment
                : DocumentDbTestEnvironmentFactory.getConfiguredEnvironments()) {
            environment.stop();
        }
    }

    /** Supplies each configured test environment to parameterized tests. */
    protected static Stream<DocumentDbTestEnvironment> getTestEnvironments() {
        return DocumentDbTestEnvironmentFactory.getConfiguredEnvironments().stream();
    }

    /** Supplies the cross product of configured environments and every scan method. */
    protected static Stream<Arguments> getTestEnvironmentsForScanMethods() {
        return DocumentDbTestEnvironmentFactory.getConfiguredEnvironments().stream()
                .flatMap(environment -> Arrays.stream(DocumentDbMetadataScanMethod.values())
                        .map(scanMethod -> Arguments.arguments(environment, scanMethod)));
    }

    /** Records which environment the current test is running against. */
    protected void setTestEnvironment(final DocumentDbTestEnvironment testEnvironment) {
        this.testEnvironment = testEnvironment;
    }

    /** Inserts the given BSON documents into the named collection of the current environment. */
    protected void insertBsonDocuments(final String collection, final BsonDocument[] documents)
            throws SQLException {
        this.testEnvironment.insertBsonDocuments(collection, documents);
    }

    /** Returns the database name of the current test environment. */
    protected String getDatabaseName() {
        return this.testEnvironment.getDatabaseName();
    }

    /** Creates a {@link DocumentDbStatement} from the connection, asserting it is non-null. */
    protected DocumentDbStatement getDocumentDbStatement(
            final Connection connection) throws SQLException {
        final DocumentDbStatement documentDbStatement =
                (DocumentDbStatement) connection.createStatement();
        Assertions.assertNotNull(documentDbStatement);
        return documentDbStatement;
    }

    /** Opens a connection using the default (random) scan method. */
    protected Connection getConnection() throws SQLException {
        return getConnection(RANDOM);
    }

    /** Opens a connection using the given scan method. */
    protected Connection getConnection(final DocumentDbMetadataScanMethod scanMethod)
            throws SQLException {
        return DriverManager.getConnection(getJdbcConnectionString(scanMethod));
    }

    /** Builds the JDBC connection string for the given scan method. */
    protected String getJdbcConnectionString(final DocumentDbMetadataScanMethod scanMethod) {
        return this.testEnvironment.getJdbcConnectionString(scanMethod);
    }

    /**
     * Prepares data for a given database and collection.
     *
     * @param collectionName the name of the collection to insert data into.
     * @param recordCount the number of records to insert.
     */
    protected void prepareSimpleConsistentData(
            final String collectionName, final int recordCount) throws SQLException {
        try (MongoClient mongoClient = this.testEnvironment.createMongoClient()) {
            final MongoDatabase database = mongoClient.getDatabase(getDatabaseName());
            final MongoCollection<BsonDocument> target =
                    database.getCollection(collectionName, BsonDocument.class);
            this.testEnvironment.prepareSimpleConsistentData(target, recordCount);
        }
    }
}
4,502
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/DocumentDbResultSetTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.google.common.collect.ImmutableList; import com.google.common.io.BaseEncoding; import com.google.common.io.ByteStreams; import com.google.common.io.CharStreams; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCursor; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.bson.BsonBinary; import org.bson.BsonDateTime; import org.bson.BsonInt64; import org.bson.BsonMaxKey; import org.bson.BsonMinKey; import org.bson.BsonNull; import org.bson.BsonRegularExpression; import org.bson.BsonString; import org.bson.BsonTimestamp; import org.bson.Document; import org.bson.types.ObjectId; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleTest; import software.amazon.documentdb.jdbc.common.utilities.JdbcColumnMetaData; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import 
software.amazon.documentdb.jdbc.metadata.DocumentDbSchema;
import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Instant;
import java.util.Calendar;
import java.util.Properties;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;

/**
 * Tests DocumentDbResultSet cursor behavior using Mockito-mocked cursors, plus
 * FlapDoodle-backed integration fixtures. (Class continues past this chunk boundary.)
 */
@ExtendWith(DocumentDbFlapDoodleExtension.class)
public class DocumentDbResultSetTest extends DocumentDbFlapDoodleTest {
    // Fetch size reported by the mocked statement.
    private static final int MOCK_FETCH_SIZE = 20;
    private static final String DATABASE_NAME = "resultDatabase";
    private static final String TEST_USER = "user";
    private static final String TEST_PASSWORD = "password";
    private static final String CONNECTION_STRING_TEMPLATE = "jdbc:documentdb://%s:%s@localhost:%s/%s?tls=false&scanMethod=%s";
    private static MongoClient client;
    private static Connection connection;
    private static Statement statement;
    // NOTE(review): resultSetFlapdoodle, connection, and statement are never assigned in
    // this chunk — confirm they are initialized elsewhere, or close() below will NPE.
    private ResultSet resultSetFlapdoodle;
    @Mock private DocumentDbStatement mockStatement;
    @Mock private MongoCursor<Document> iterator;
    private DocumentDbResultSet resultSet;

    /** Creates an admin client and a valid database user before any test runs. */
    @BeforeAll
    @SuppressFBWarnings(value = "HARD_CODE_PASSWORD", justification = "Hardcoded for test purposes only")
    static void initialize() {
        // Add a valid users to the local MongoDB instance.
        client = createMongoClient("admin", "admin", "admin");
        createUser(DATABASE_NAME, TEST_USER, TEST_PASSWORD);
    }

    /** Initializes Mockito mocks and stubs the mocked statement's fetch size. */
    @BeforeEach
    void init() throws SQLException {
        MockitoAnnotations.openMocks(this);
        Mockito.when(mockStatement.getFetchSize()).thenReturn(MOCK_FETCH_SIZE);
    }

    /** Removes the default schema after each test so tests remain independent. */
    @AfterEach
    void afterEach() throws Exception {
        final DocumentDbConnectionProperties properties = DocumentDbConnectionProperties
                .getPropertiesFromConnectionString(
                        new Properties(), getJdbcConnectionString(), "jdbc:documentdb:");
        try (DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, null)) {
            schemaWriter.remove(DocumentDbSchema.DEFAULT_SCHEMA_NAME);
        }
    }

    /** Closes the shared result set, statement, and connection after all tests complete. */
    @AfterAll
    void close() throws SQLException {
        resultSetFlapdoodle.close();
        statement.close();
        connection.close();
    }

    @Test
    @DisplayName("Test that next() moves cursor to correct row and handles invalid inputs.")
    void testNext() throws SQLException {
        // Three single-column documents drive the cursor through three rows.
        final Document doc1 = Document.parse("{\"_id\": \"key1\"}");
        final Document doc2 = Document.parse("{\"_id\": \"key2\"}");
        final Document doc3 = Document.parse("{\"_id\": \"key3\"}");
        Mockito.when(iterator.next()).thenReturn(doc1).thenReturn(doc2).thenReturn(doc3);
        final JdbcColumnMetaData column = JdbcColumnMetaData.builder().columnLabel("_id").ordinal(0).build();
        resultSet = new DocumentDbResultSet(mockStatement, iterator, ImmutableList.of(column), ImmutableList.of("_id"));
        // Test cursor before first row.
        Mockito.when(iterator.hasNext()).thenReturn(true);
        Assertions.assertTrue(resultSet.isBeforeFirst());
        Assertions.assertFalse(resultSet.isFirst());
        Assertions.assertFalse(resultSet.isLast());
        Assertions.assertFalse(resultSet.isAfterLast());
        // getRowIndex() is 0-based (-1 before first); getRow() is 1-based (0 when off-row).
        Assertions.assertEquals(-1, resultSet.getRowIndex());
        Assertions.assertEquals(0, resultSet.getRow());
        // Test cursor at first row.
        Assertions.assertTrue(resultSet.next());
        Assertions.assertFalse(resultSet.isBeforeFirst());
        Assertions.assertTrue(resultSet.isFirst());
        Assertions.assertFalse(resultSet.isLast());
        Assertions.assertFalse(resultSet.isAfterLast());
        Assertions.assertEquals(0, resultSet.getRowIndex());
        Assertions.assertEquals(1, resultSet.getRow());
        // Test cursor at second row.
        Assertions.assertTrue(resultSet.next());
        Assertions.assertFalse(resultSet.isBeforeFirst());
        Assertions.assertFalse(resultSet.isFirst());
        Assertions.assertFalse(resultSet.isLast());
        Assertions.assertFalse(resultSet.isAfterLast());
        Assertions.assertEquals(1, resultSet.getRowIndex());
        Assertions.assertEquals(2, resultSet.getRow());
        // Test cursor at last row.
        Mockito.when(iterator.hasNext()).thenReturn(true).thenReturn(false);
        Assertions.assertTrue(resultSet.next());
        Assertions.assertFalse(resultSet.isBeforeFirst());
        Assertions.assertFalse(resultSet.isFirst());
        Assertions.assertTrue(resultSet.isLast());
        Assertions.assertFalse(resultSet.isAfterLast());
        Assertions.assertEquals(2, resultSet.getRowIndex());
        Assertions.assertEquals(3, resultSet.getRow());
        // Test cursor after last row.
        Assertions.assertFalse(resultSet.next());
        Assertions.assertFalse(resultSet.isBeforeFirst());
        Assertions.assertFalse(resultSet.isFirst());
        Assertions.assertFalse(resultSet.isLast());
        Assertions.assertTrue(resultSet.isAfterLast());
        Assertions.assertEquals(2, resultSet.getRowIndex());
        Assertions.assertEquals(0, resultSet.getRow());
    }

    @Test
    @DisplayName("Test that absolute() moves cursor to correct row and handles invalid inputs.")
    void testAbsolute() throws SQLException {
        final Document doc1 = Document.parse("{\"_id\": \"key1\"}");
        final Document doc2 = Document.parse("{\"_id\": \"key2\"}");
        final Document doc3 = Document.parse("{\"_id\": \"key3\"}");
        Mockito.when(iterator.next()).thenReturn(doc1).thenReturn(doc2).thenReturn(doc3);
        final JdbcColumnMetaData column = JdbcColumnMetaData.builder().columnLabel("_id").ordinal(0).build();
        resultSet = new DocumentDbResultSet(mockStatement, iterator, ImmutableList.of(column), ImmutableList.of("_id"));
        // Test going to negative row number. (0 -> -1)
        Mockito.when(iterator.hasNext()).thenReturn(true);
        Assertions.assertEquals(
                "The row value must be greater than 1.",
                Assertions.assertThrows(SQLException.class, () -> resultSet.absolute(-1)).getMessage());
        Assertions.assertEquals(-1, resultSet.getRowIndex());
        Assertions.assertEquals(0, resultSet.getRow());
        // Test going to valid row number. (0 -> 2)
        Assertions.assertTrue(resultSet.absolute(2));
        Assertions.assertEquals(1, resultSet.getRowIndex());
        Assertions.assertEquals(2, resultSet.getRow());
        // Test going to previous row number. (2 -> 1) — forward-only cursor rejects this.
        Assertions.assertEquals(
                SqlError.lookup(SqlError.RESULT_FORWARD_ONLY),
                Assertions.assertThrows(SQLException.class, () -> resultSet.absolute(1)).getMessage());
        Assertions.assertEquals(1, resultSet.getRowIndex());
        Assertions.assertEquals(2, resultSet.getRow());
        // Test going to out of range row number.
(2 -> 4) Mockito.when(iterator.hasNext()).thenReturn(true).thenReturn(false); Assertions.assertFalse(resultSet.absolute(4)); Assertions.assertEquals(2, resultSet.getRowIndex()); Assertions.assertEquals(0, resultSet.getRow()); } @Test @DisplayName("Test that relative() moves cursor to correct row and handles invalid inputs.") void testRelative() throws SQLException { final Document doc1 = Document.parse("{\"_id\": \"key1\"}"); final Document doc2 = Document.parse("{\"_id\": \"key2\"}"); final Document doc3 = Document.parse("{\"_id\": \"key3\"}"); Mockito.when(iterator.next()).thenReturn(doc1).thenReturn(doc2).thenReturn(doc3); final JdbcColumnMetaData column = JdbcColumnMetaData.builder().columnLabel("_id").ordinal(0).build(); resultSet = new DocumentDbResultSet(mockStatement, iterator, ImmutableList.of(column), ImmutableList.of("_id")); // Test going to valid row number. (0 -> 2) Mockito.when(iterator.hasNext()).thenReturn(true); Assertions.assertTrue(resultSet.relative(2)); Assertions.assertEquals(1, resultSet.getRowIndex()); Assertions.assertEquals(2, resultSet.getRow()); // Test going to previous row number. (2 -> 1) Assertions.assertEquals( SqlError.lookup(SqlError.RESULT_FORWARD_ONLY), Assertions.assertThrows(SQLException.class, () -> resultSet.relative(-1)).getMessage()); Assertions.assertEquals(1, resultSet.getRowIndex()); Assertions.assertEquals(2, resultSet.getRow()); // Test staying in same row. (2 -> 2) Assertions.assertTrue(resultSet.relative(0)); Assertions.assertEquals(1, resultSet.getRowIndex()); Assertions.assertEquals(2, resultSet.getRow()); // Test going to out of range row number. 
(2 -> 4) Mockito.when(iterator.hasNext()).thenReturn(true).thenReturn(false); Assertions.assertFalse(resultSet.relative(2)); Assertions.assertEquals(2, resultSet.getRowIndex()); Assertions.assertEquals(0, resultSet.getRow()); } @Test @DisplayName("Test that close() closes the Mongo cursor and result set.") void testClose() throws SQLException { final JdbcColumnMetaData column = JdbcColumnMetaData.builder().columnLabel("_id").ordinal(0).build(); resultSet = new DocumentDbResultSet(mockStatement, iterator, ImmutableList.of(column), ImmutableList.of("_id")); // Test close. Assertions.assertDoesNotThrow(() -> resultSet.close()); Assertions.assertTrue(resultSet.isClosed()); Mockito.verify(iterator, Mockito.times(1)).close(); // Attempt to close twice. Assertions.assertDoesNotThrow(() -> resultSet.close()); Assertions.assertTrue(resultSet.isClosed()); // Attempt to use closed result set. Assertions.assertEquals( SqlError.lookup(SqlError.RESULT_SET_CLOSED), Assertions.assertThrows(SQLException.class, () -> resultSet.next()).getMessage()); } @Test @DisplayName("Tests that findColumn() returns the correct 1-based column index.") void testFindColumn() throws SQLException { final JdbcColumnMetaData column1 = JdbcColumnMetaData.builder().columnLabel("_id").ordinal(0).build(); final JdbcColumnMetaData column2 = JdbcColumnMetaData.builder().columnLabel("value").ordinal(1).build(); final JdbcColumnMetaData column3 = JdbcColumnMetaData.builder().columnLabel("Value").ordinal(2).build(); final ImmutableList<JdbcColumnMetaData> columnMetaData = ImmutableList.of(column1, column2, column3); resultSet = new DocumentDbResultSet(mockStatement, iterator, columnMetaData, ImmutableList.of("_id")); Assertions.assertEquals(2, resultSet.findColumn("value")); Assertions.assertEquals(3, resultSet.findColumn("Value")); Assertions.assertEquals( SqlError.lookup(SqlError.INVALID_COLUMN_LABEL, "value2"), Assertions.assertThrows(SQLException.class, () -> resultSet.findColumn("value2")) .getMessage()); 
} @Test @DisplayName("Tests that fetch size can be set and get successfully.") void testGetAndSetFetchSize() throws SQLException { final JdbcColumnMetaData column = JdbcColumnMetaData.builder().columnLabel("_id").ordinal(0).build(); final ImmutableList<JdbcColumnMetaData> columnMetaData = ImmutableList.of(column); resultSet = new DocumentDbResultSet(mockStatement, iterator, columnMetaData, ImmutableList.of("_id")); Assertions.assertEquals(MOCK_FETCH_SIZE, resultSet.getFetchSize()); Assertions.assertDoesNotThrow(() -> resultSet.setFetchSize(10)); Assertions.assertEquals(10, resultSet.getFetchSize()); } @Test @DisplayName("Test verifyRow and verifyColumnIndex") void testVerifyRowVerifyColumnIndex() throws SQLException { final Document doc1 = Document.parse("{\"_id\": null }"); final JdbcColumnMetaData column = JdbcColumnMetaData.builder().columnLabel("_id").ordinal(0).build(); resultSet = new DocumentDbResultSet(mockStatement, iterator, ImmutableList.of(column), ImmutableList.of("_id")); // Try access before first row. Assertions.assertEquals( SqlError.lookup(SqlError.BEFORE_FIRST), Assertions.assertThrows(SQLException.class, () -> resultSet.getString(1)) .getMessage()); // Move to first row. 
Mockito.when(iterator.hasNext()).thenReturn(true); Mockito.when(iterator.next()).thenReturn(doc1); Assertions.assertTrue(resultSet.next()); Assertions.assertNull(Assertions.assertDoesNotThrow(() -> resultSet.getString(1))); Assertions.assertEquals("Invalid index (2), indexes must be between 1 and 1 (inclusive).", Assertions.assertThrows(SQLException.class, () -> resultSet.getString(2)) .getMessage()); Assertions.assertEquals("Invalid index (0), indexes must be between 1 and 1 (inclusive).", Assertions.assertThrows(SQLException.class, () -> resultSet.getString(0)) .getMessage()); Assertions.assertEquals("Invalid index (-1), indexes must be between 1 and 1 (inclusive).", Assertions.assertThrows(SQLException.class, () -> resultSet.getString(-1)) .getMessage()); // Move past last row. Mockito.when(iterator.hasNext()).thenReturn(false); Assertions.assertFalse(resultSet.next()); Assertions.assertEquals(SqlError.lookup(SqlError.AFTER_LAST), Assertions.assertThrows(SQLException.class, () -> resultSet.getString(1)) .getMessage()); } @Test @DisplayName("Tests get from string") void testGetString() throws SQLException, IOException { final String collection = "resultSetTestString"; final Document document = Document.parse("{\"_id\": \"key1\"}"); document.append("field1", new BsonString("30")); document.append("field2", new BsonString("语言处理")); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertEquals("30", resultSetFlapdoodle.getString(2)); Assertions.assertEquals("30", resultSetFlapdoodle.getObject(2)); Assertions.assertEquals("30", resultSetFlapdoodle.getObject(2, String.class)); Assertions.assertEquals("30", resultSetFlapdoodle.getNString(2)); 
Assertions.assertEquals("30", CharStreams.toString(new InputStreamReader(resultSetFlapdoodle.getAsciiStream(2), StandardCharsets.US_ASCII))); Assertions.assertEquals("30", CharStreams.toString(resultSetFlapdoodle.getCharacterStream(2))); Assertions.assertEquals("30", CharStreams.toString(resultSetFlapdoodle.getNCharacterStream(2))); Assertions.assertEquals("30", resultSetFlapdoodle.getClob(2).getSubString(1, 2)); // Retrieve non-ascii string. Assertions.assertEquals("语言处理", resultSetFlapdoodle.getString(3)); Assertions.assertEquals("语言处理", resultSetFlapdoodle.getObject(3)); Assertions.assertEquals("语言处理", resultSetFlapdoodle.getObject(3, String.class)); Assertions.assertEquals("语言处理", resultSetFlapdoodle.getNString(3)); Assertions.assertEquals("语言处理", CharStreams.toString(resultSetFlapdoodle.getCharacterStream(3))); Assertions.assertEquals("语言处理", CharStreams.toString(resultSetFlapdoodle.getNCharacterStream(3))); Assertions.assertEquals("语言处理", resultSetFlapdoodle.getClob(3).getSubString(1, 4)); } @Test @DisplayName("Tests get from int") void testGetInt() throws SQLException { final String collection = "resultSetTestInt"; final Document document = Document.parse("{\"_id\": \"key1\", \"field\": 3}"); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertEquals(new BigDecimal("3"), resultSetFlapdoodle.getBigDecimal(2)); Assertions.assertEquals(3, resultSetFlapdoodle.getDouble(2), 0.01); Assertions.assertEquals(3, resultSetFlapdoodle.getFloat(2), 0.01); Assertions.assertEquals(3, resultSetFlapdoodle.getInt(2)); Assertions.assertEquals(3, resultSetFlapdoodle.getLong(2)); Assertions.assertEquals(3, resultSetFlapdoodle.getShort(2)); 
Assertions.assertEquals(3, resultSetFlapdoodle.getObject(2)); Assertions.assertEquals(3, resultSetFlapdoodle.getByte(2)); } @Test @DisplayName("Tests get from double") void testGetDouble() throws SQLException { final String collection = "resultSetTestDouble"; final Document document = Document.parse("{\"_id\": \"key1\", \"field\": 1.5}"); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertEquals(new BigDecimal("1.5"), resultSetFlapdoodle.getBigDecimal(2)); Assertions.assertEquals(1.5, resultSetFlapdoodle.getDouble(2), 0.01); Assertions.assertEquals(1.5, resultSetFlapdoodle.getFloat(2), 0.01); Assertions.assertEquals(1, resultSetFlapdoodle.getInt(2)); Assertions.assertEquals(1, resultSetFlapdoodle.getLong(2)); Assertions.assertEquals(1, resultSetFlapdoodle.getShort(2)); Assertions.assertEquals("1.5", resultSetFlapdoodle.getString(2)); Assertions.assertEquals(1.5, resultSetFlapdoodle.getObject(2)); } @Test @DisplayName("Tests get from int64") void testGetInt64() throws SQLException { final String collection = "resultSetTestInt64"; final Document document = Document.parse("{\"_id\": \"key1\"}"); document.append("field", new BsonInt64(1000000000000L)); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertEquals(new BigDecimal("1000000000000"), resultSetFlapdoodle.getBigDecimal(2)); Assertions.assertEquals(1000000000000d, 
resultSetFlapdoodle.getDouble(2), 1); Assertions.assertEquals(1000000000000f, resultSetFlapdoodle.getFloat(2), 1000); Assertions.assertEquals(0, resultSetFlapdoodle.getInt(2)); // getInt returns default value 0 if result > max value Assertions.assertEquals(1000000000000L, resultSetFlapdoodle.getLong(2)); Assertions.assertEquals(0, resultSetFlapdoodle.getShort(2)); // getShort returns default value 0 if result > max value Assertions.assertEquals("1000000000000", resultSetFlapdoodle.getString(2)); Assertions.assertEquals(1000000000000L, resultSetFlapdoodle.getObject(2)); } @Test @DisplayName("Tests get from null") void testGetNull() throws SQLException { final String collection = "resultSetTestNull"; final Document document = Document.parse("{\"_id\": \"key1\"}"); document.append("field", new BsonNull()); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); // Check where default is 0. Assertions.assertEquals(new BigDecimal("0"), resultSetFlapdoodle.getBigDecimal(2)); Assertions.assertEquals(0, resultSetFlapdoodle.getDouble(2), 1); Assertions.assertEquals(0, resultSetFlapdoodle.getFloat(2), 1000); Assertions.assertEquals(0, resultSetFlapdoodle.getInt(2)); Assertions.assertEquals(0, resultSetFlapdoodle.getLong(2)); Assertions.assertEquals(0, resultSetFlapdoodle.getShort(2)); Assertions.assertTrue(resultSetFlapdoodle.wasNull()); // Check where default is null. 
Assertions.assertNull(resultSetFlapdoodle.getString(2)); Assertions.assertNull(resultSetFlapdoodle.getObject(2)); Assertions.assertNull(resultSetFlapdoodle.getDate(2)); Assertions.assertNull(resultSetFlapdoodle.getTimestamp(2)); Assertions.assertNull(resultSetFlapdoodle.getClob(2)); Assertions.assertNull(resultSetFlapdoodle.getBlob(2)); Assertions.assertNull(resultSetFlapdoodle.getCharacterStream(2)); Assertions.assertNull(resultSetFlapdoodle.getAsciiStream(2)); Assertions.assertNull(resultSetFlapdoodle.getBinaryStream(2)); Assertions.assertTrue(resultSetFlapdoodle.wasNull()); } @Test @DisplayName("Tests that getters from nested documents work.") void testGetNested() throws SQLException { final String collection = "resultSetTestNested"; final Document document = Document.parse("{\"_id\": \"key1\", " + "\"extraField\": \"string\"," + "\"subdocument\": " + "{\"field\": 4}}"); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection + "_subdocument")); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertEquals(new BigDecimal("4"), resultSetFlapdoodle.getBigDecimal(2)); Assertions.assertEquals(4, resultSetFlapdoodle.getDouble(2), 0.1); Assertions.assertEquals(4, resultSetFlapdoodle.getFloat(2), 0.1); Assertions.assertEquals(4, resultSetFlapdoodle.getInt(2)); Assertions.assertEquals(4L, resultSetFlapdoodle.getLong(2)); Assertions.assertEquals(4, resultSetFlapdoodle.getShort(2)); Assertions.assertEquals("4", resultSetFlapdoodle.getString(2)); } @Test @DisplayName("Test for get from ObjectId") void testGetId() throws SQLException { final String collection = "resultSetTestId"; final Document document = new Document(); final ObjectId id = new ObjectId(); document.append("_id", id); 
client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertEquals(id.toString(), resultSetFlapdoodle.getString(1)); Assertions.assertEquals(id.toString(), resultSetFlapdoodle.getObject(1)); } @Test @DisplayName("Test for get from Boolean") void testGetBoolean() throws SQLException { final String collection = "resultSetTestBoolean"; final Document document = Document.parse("{\"_id\": \"key1\", \"field\": false}"); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertEquals("false", resultSetFlapdoodle.getString(2)); Assertions.assertFalse(resultSetFlapdoodle.getBoolean(2)); Assertions.assertEquals(false, resultSetFlapdoodle.getObject(2)); } @Test @DisplayName("Test for get from Date") void testGetDate() throws SQLException { final String collection = "resultSetTestDate"; final Document document = Document.parse("{\"_id\": \"key1\"}"); final BsonDateTime date = new BsonDateTime(100000); document.append("date", date); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertEquals(new Date(date.getValue()), 
resultSetFlapdoodle.getDate(2)); Assertions.assertEquals(new Timestamp(date.getValue()), resultSetFlapdoodle.getTimestamp(2)); Assertions.assertEquals(new Time(date.getValue()), resultSetFlapdoodle.getTime(2)); Assertions.assertEquals(new Date(date.getValue()), resultSetFlapdoodle.getObject(2)); Assertions.assertEquals( new Date(date.getValue()), resultSetFlapdoodle.getDate(2, Calendar.getInstance(TimeZone.getTimeZone("UTC")))); } @Test @DisplayName("Test for get from Regex") void testGetRegex() throws SQLException { final String collection = "resultSetTestRegex"; final Document document = Document.parse("{\"_id\": \"key1\"}"); final BsonRegularExpression regex = new BsonRegularExpression("^example"); document.append("regex", regex); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertEquals(regex.toString(), resultSetFlapdoodle.getString(2)); Assertions.assertEquals(regex.toString(), resultSetFlapdoodle.getObject(2)); } @Test @DisplayName("Test for get from Min/Max key") void testGetMinMaxKey() throws SQLException { final String collection = "resultSetTestMinMax"; final Document document = Document.parse("{\"_id\": \"key1\"}"); final BsonMaxKey max = new BsonMaxKey(); final BsonMinKey min = new BsonMinKey(); document.append("max", max); document.append("min", min); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); 
Assertions.assertEquals("MaxKey", resultSetFlapdoodle.getString(2)); Assertions.assertEquals("MinKey", resultSetFlapdoodle.getString(3)); } @Test @DisplayName("Test for get from timestamp") void testGetTimestamp() throws SQLException { final String collection = "resultSetTestTimestamp"; final Document document = Document.parse("{\"_id\": \"key1\"}"); final Instant dateTime = Instant.now(); final BsonTimestamp timestamp = new BsonTimestamp((int) dateTime.getEpochSecond(), 1); document.append("timestamp", timestamp); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertEquals( new Timestamp(TimeUnit.SECONDS.toMillis(timestamp.getTime())).toString(), resultSetFlapdoodle.getString(2)); Assertions.assertEquals(new Timestamp(TimeUnit.SECONDS.toMillis(timestamp.getTime())), resultSetFlapdoodle.getObject(2)); Assertions.assertEquals( new Timestamp(TimeUnit.SECONDS.toMillis(timestamp.getTime())), resultSetFlapdoodle.getTimestamp(2)); Assertions.assertEquals( new Timestamp(TimeUnit.SECONDS.toMillis(timestamp.getTime())), resultSetFlapdoodle.getTimestamp(2, Calendar.getInstance(TimeZone.getTimeZone("UTC")))); } @Test @DisplayName("Test for get from binary") void testGetBinary() throws SQLException, IOException { final String collection = "resultSetTestBinary"; final Document document = Document.parse("{\"_id\": \"key1\"}"); final BsonBinary binary = new BsonBinary("123abc".getBytes(StandardCharsets.UTF_8)); document.append("binary", binary); client.getDatabase(DATABASE_NAME).getCollection(collection).insertOne(document); connection = DriverManager.getConnection(getJdbcConnectionString()); statement = connection.createStatement(); resultSetFlapdoodle = 
statement.executeQuery( String.format("SELECT * FROM \"%s\".\"%s\"", DATABASE_NAME, collection)); Assertions.assertTrue(resultSetFlapdoodle.next()); Assertions.assertArrayEquals(binary.getData(), resultSetFlapdoodle.getBytes(2)); Assertions.assertArrayEquals(binary.getData(), resultSetFlapdoodle.getBlob(2).getBytes(1,6)); Assertions.assertArrayEquals(binary.getData(), (byte[]) resultSetFlapdoodle.getObject(2)); Assertions.assertArrayEquals(binary.getData(), ByteStreams.toByteArray(resultSetFlapdoodle.getBinaryStream(2))); Assertions.assertEquals( BaseEncoding.base16().encode(binary.getData()), resultSetFlapdoodle.getString(2)); } private static String getJdbcConnectionString() { return String.format( CONNECTION_STRING_TEMPLATE, TEST_USER, TEST_PASSWORD, getMongoPort(), DATABASE_NAME, DocumentDbMetadataScanMethod.ALL.getName()); } }
4,503
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/DocumentDbQueryExecutorTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc;

import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import lombok.SneakyThrows;
import org.bson.Document;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension;
import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleTest;
import software.amazon.documentdb.jdbc.common.utilities.JdbcColumnMetaData;
import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter;
import software.amazon.documentdb.jdbc.query.DocumentDbQueryMappingService;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.FETCH_SIZE_DEFAULT;

/**
 * Tests for {@link DocumentDbQueryExecutor}: query cancellation from one or more
 * threads, query timeout, default fetch size handling, and the allow-disk-use
 * option. Runs against an embedded (FlapDoodle) MongoDB instance, with a mock
 * executor that replaces the real query pipeline by a deliberately slow
 * {@code find()} so there is a window in which a cancel can land.
 */
@ExtendWith(DocumentDbFlapDoodleExtension.class)
public class DocumentDbQueryExecutorTest extends DocumentDbFlapDoodleTest {
    private static final String DATABASE_NAME = "database";
    private static final String COLLECTION_NAME = "testCollection";
    private static final String TEST_USER = "user";
    private static final String TEST_PASSWORD = "password";
    private static final String QUERY =
            "SELECT COUNT(*) FROM \"database\".\"testCollection\"";
    private static final DocumentDbConnectionProperties VALID_CONNECTION_PROPERTIES =
            new DocumentDbConnectionProperties();
    // Shared across all tests; initialized once in initialize().
    private static DocumentDbQueryExecutor executor;
    private static DocumentDbStatement statement;
    // Result set of the most recent executeQuery; closed after each test.
    private ResultSet resultSet;

    /**
     * One-time setup: creates a test user, points the shared connection properties
     * at the embedded MongoDB instance, seeds one document, and wires up the
     * shared statement/executor pair.
     *
     * NOTE(review): @BeforeAll on a non-static method requires a per-class test
     * instance lifecycle — presumably configured elsewhere (extension or
     * junit-platform.properties); verify.
     *
     * @throws SQLException if the connection cannot be established.
     */
    @BeforeAll
    @SuppressFBWarnings(
            value = "HARD_CODE_PASSWORD",
            justification = "Hardcoded for test purposes only")
    void initialize() throws SQLException {
        // Add a valid user to the local MongoDB instance.
        createUser(DATABASE_NAME, TEST_USER, TEST_PASSWORD);
        VALID_CONNECTION_PROPERTIES.setUser(TEST_USER);
        VALID_CONNECTION_PROPERTIES.setPassword(TEST_PASSWORD);
        VALID_CONNECTION_PROPERTIES.setDatabase(DATABASE_NAME);
        VALID_CONNECTION_PROPERTIES.setTlsEnabled("false");
        VALID_CONNECTION_PROPERTIES.setHostname("localhost:" + getMongoPort());
        VALID_CONNECTION_PROPERTIES.setAllowDiskUseOption("enable");
        prepareSimpleConsistentData(DATABASE_NAME, COLLECTION_NAME,
                1, TEST_USER, TEST_PASSWORD);
        final DocumentDbConnection connection =
                new DocumentDbConnection(VALID_CONNECTION_PROPERTIES);
        // NOTE(review): 'statement' is still null at this point, so the mock
        // executor is constructed with a null statement; it is presumably
        // re-associated when passed to the DocumentDbStatement constructor
        // below — TODO confirm.
        executor = new MockQueryExecutor(
                statement, VALID_CONNECTION_PROPERTIES, null, 0, 0);
        statement = new DocumentDbStatement(connection, executor);
    }

    /**
     * Per-test cleanup: removes the cached schema and closes any result set the
     * test produced. (Named afterAll but runs after each test via @AfterEach.)
     *
     * @throws Exception if cleanup fails.
     */
    @AfterEach
    void afterAll() throws Exception {
        try (DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(
                VALID_CONNECTION_PROPERTIES, null)) {
            schemaWriter.remove("id");
        }
        if (resultSet != null) {
            resultSet.close();
        }
    }

    /** Tests that canceling a query before it has been executed fails. */
    @Test
    @DisplayName("Tests canceling a query without executing first.")
    public void testCancelQueryWithoutExecute() {
        final ExecutorService cancelThread = getCancelThread();
        final Cancel cancel = launchCancelThread(0, statement, cancelThread);
        waitCancelToComplete(cancelThread);
        final SQLException exception = getCancelException(cancel);
        // With no query in flight, cancel must be rejected.
        Assertions.assertNotNull(exception);
        Assertions.assertEquals(
                "Cannot cancel query, it is either completed or has not started.",
                exception.getMessage());
    }

    /**
     * Tests that canceling a query while it is executing succeeds and that the query execution then
     * fails because it has been canceled.
     */
    @Test
    @DisplayName("Tests canceling a query while execution is in progress.")
    public void testCancelQueryWhileExecuteInProgress() {
        // Wait 100 milliseconds before attempting to cancel.
        final ExecutorService cancelThread = getCancelThread();
        final Cancel cancel = launchCancelThread(100, statement, cancelThread);
        // Check that query was canceled and cancel thread did not throw exception.
        Assertions.assertEquals(
                "Query has been canceled.",
                Assertions.assertThrows(SQLException.class,
                        () -> resultSet = statement.executeQuery(QUERY))
                        .getMessage());
        waitCancelToComplete(cancelThread);
        Assertions.assertNull(cancel.getException(), () -> cancel.getException().getMessage());
    }

    /** Tests that canceling a query from two different threads. */
    @Test
    @DisplayName("Tests canceling a query from 2 different threads simultaneously.")
    public void testCancelQueryFromTwoThreads() {
        // Let 2 threads both wait for 100 milliseconds before attempting to cancel.
        final ExecutorService cancelThread1 = getCancelThread();
        final ExecutorService cancelThread2 = getCancelThread();
        final Cancel cancel1 = launchCancelThread(100, statement, cancelThread1);
        final Cancel cancel2 = launchCancelThread(300, statement, cancelThread2);
        // Check that query was canceled.
        Assertions.assertEquals(
                "Query has been canceled.",
                Assertions.assertThrows(SQLException.class,
                        () -> resultSet = statement.executeQuery(QUERY))
                        .getMessage());
        waitCancelToComplete(cancelThread1);
        waitCancelToComplete(cancelThread2);
        // Check that at least one thread succeeded (its exception slot is null);
        // the other may have raced and been rejected.
        final SQLException e1 = getCancelException(cancel1);
        final SQLException e2 = getCancelException(cancel2);
        final List<SQLException> exceptions = new ArrayList<>(Arrays.asList(e1, e2));
        Assertions.assertTrue(exceptions.stream().anyMatch(Objects::isNull));
    }

    /** Tests that canceling a query after execution has already completed fails. */
    @Test
    @DisplayName("Tests canceling query after execution already completes.")
    public void testCancelQueryAfterExecuteComplete() {
        // Execute query.
        Assertions.assertDoesNotThrow(() -> statement.execute(QUERY));
        // Launch cancel after execution has already completed.
        final ExecutorService cancelThread = getCancelThread();
        final Cancel cancel = launchCancelThread(0, statement, cancelThread);
        waitCancelToComplete(cancelThread);
        final SQLException exception = getCancelException(cancel);
        Assertions.assertNotNull(exception);
        Assertions.assertEquals(
                "Cannot cancel query, it is either completed or has not started.",
                exception.getMessage());
    }

    /** Tests canceling a query after it has already been canceled. */
    @Test
    @DisplayName("Tests canceling a query after it has already been canceled.")
    public void testCancelQueryTwice() {
        // Wait 100 milliseconds before attempting to cancel.
        final ExecutorService cancelThread1 = getCancelThread();
        final Cancel cancel1 = launchCancelThread(100, statement, cancelThread1);
        // Check that query was canceled and cancel thread did not throw exception.
        Assertions.assertEquals(
                "Query has been canceled.",
                Assertions.assertThrows(SQLException.class,
                        () -> resultSet = statement.executeQuery(QUERY))
                        .getMessage());
        waitCancelToComplete(cancelThread1);
        Assertions.assertNull(cancel1.getException(), () -> cancel1.getException().getMessage());
        // Attempt to cancel again; the second cancel must be rejected.
        final ExecutorService cancelThread2 = getCancelThread();
        final Cancel cancel2 = launchCancelThread(1, statement, cancelThread2);
        waitCancelToComplete(cancelThread2);
        final SQLException exception = getCancelException(cancel2);
        Assertions.assertNotNull(exception);
        Assertions.assertEquals(
                "Cannot cancel query, it is either completed or has not started.",
                exception.getMessage());
    }

    /** Tests getting and setting the query timeout. **/
    @Test
    @DisplayName("Tests getting and setting the query timeout.")
    public void testGetSetQueryTimeout() throws SQLException {
        Assertions.assertDoesNotThrow(() -> statement.setQueryTimeout(30));
        Assertions.assertEquals(30, statement.getQueryTimeout());
    }

    /** Tests setting default fetch size with valid size. **/
    @Test
    @DisplayName("Tests setting the default fetch size with valid size.")
    public void testSetValidDefaultFetchSize() throws SQLException {
        final DocumentDbConnectionProperties properties =
                new DocumentDbConnectionProperties(VALID_CONNECTION_PROPERTIES);
        properties.setDefaultFetchSize("123");
        final DocumentDbConnection connection = new DocumentDbConnection(properties);
        final DocumentDbStatement validFetchSizeStatement = new DocumentDbStatement(connection);
        Assertions.assertEquals(
                123,
                validFetchSizeStatement.getFetchSize(),
                "Custom fetch size should be used if valid.");
    }

    /** Tests setting default fetch size with invalid size. **/
    @Test
    @DisplayName("Tests setting the default fetch size with invalid size.")
    public void testSetInvalidDefaultFetchSize() throws SQLException {
        final DocumentDbConnectionProperties properties =
                new DocumentDbConnectionProperties(VALID_CONNECTION_PROPERTIES);
        // Non-numeric value: the driver should silently fall back to the default.
        properties.setDefaultFetchSize("123a");
        final DocumentDbConnection connection = new DocumentDbConnection(properties);
        final DocumentDbStatement invalidFetchSizeStatement = new DocumentDbStatement(connection);
        Assertions.assertEquals(
                FETCH_SIZE_DEFAULT,
                invalidFetchSizeStatement.getFetchSize(),
                "Default fetch size should be used if invalid.");
    }

    /** Tests setting the allow disk usage option. **/
    @Test
    @DisplayName("Tests setting the allow disk usage option.")
    public void testAllowDiskUse() throws SQLException {
        // Starts as ENABLE (set in initialize()); cycle through the other values.
        Assertions.assertEquals(DocumentDbAllowDiskUseOption.ENABLE, executor.getAllowDiskUse());
        executor.setAllowDiskUse(DocumentDbAllowDiskUseOption.DEFAULT);
        Assertions.assertEquals(DocumentDbAllowDiskUseOption.DEFAULT, executor.getAllowDiskUse());
        executor.setAllowDiskUse(DocumentDbAllowDiskUseOption.DISABLE);
        Assertions.assertEquals(DocumentDbAllowDiskUseOption.DISABLE, executor.getAllowDiskUse());
        // Restore ENABLE so other tests see the initial state.
        executor.setAllowDiskUse(DocumentDbAllowDiskUseOption.ENABLE);
    }

    /** Creates a fresh single-thread daemon executor used to issue a cancel. */
    private ExecutorService getCancelThread() {
        return Executors.newSingleThreadExecutor(
                new ThreadFactoryBuilder().setNameFormat("cancelThread").setDaemon(true).build());
    }

    /**
     * Schedules a {@link Cancel} task on the given executor.
     *
     * @param waitTime     milliseconds the task sleeps before calling cancel().
     * @param statement    the statement to cancel.
     * @param cancelThread the executor to run the task on.
     * @return the scheduled task, so the caller can inspect its exception.
     */
    private Cancel launchCancelThread(
            final int waitTime, final Statement statement, final ExecutorService cancelThread) {
        final Cancel cancel1 = new Cancel(statement, waitTime);
        cancelThread.execute(cancel1);
        return cancel1;
    }

    /** Returns the exception captured by the cancel task, or null on success. */
    private SQLException getCancelException(final Cancel cancel) {
        return cancel.getException();
    }

    /** Waits (up to 10 seconds) for the cancel task's executor to finish. */
    @SneakyThrows
    private void waitCancelToComplete(final ExecutorService cancelThread) {
        cancelThread.awaitTermination(10000, TimeUnit.MILLISECONDS);
    }

    /** Class to cancel query in a separate thread. */
    private static class Cancel implements Runnable {
        private final Statement statement;
        private final int waitTime;
        // Set if cancel() threw; null means the cancel succeeded.
        private SQLException exception;

        Cancel(final Statement statement, final int waitTime) {
            this.statement = statement;
            this.waitTime = waitTime;
        }

        @SneakyThrows
        @Override
        public void run() {
            try {
                Thread.sleep(waitTime);
                statement.cancel();
            } catch (final SQLException e) {
                exception = e;
            }
        }

        /**
         * Function to get exception if the run call generated one.
         */
        public SQLException getException() {
            return exception;
        }
    }

    /**
     * Identical to actual DocumentDbQueryExecutor but overrides runQuery, so we can simulate a
     * long-running query with find instead.
     */
    private static class MockQueryExecutor extends DocumentDbQueryExecutor {
        MockQueryExecutor(
                final Statement statement,
                final DocumentDbConnectionProperties connectionProperties,
                final DocumentDbQueryMappingService queryMapper,
                final int queryTimeoutSecs,
                final int maxFetchSize) {
            super(statement, connectionProperties, queryMapper, queryTimeoutSecs, maxFetchSize);
        }

        /**
         * Runs a deliberately slow find() (ignoring the SQL text) so that cancel
         * tests have a ~5 second window to interrupt a running query.
         */
        @Override
        protected java.sql.ResultSet runQuery(final String sql) throws SQLException {
            try (MongoClient client = VALID_CONNECTION_PROPERTIES.createMongoClient()) {
                final MongoDatabase database =
                        client.getDatabase(VALID_CONNECTION_PROPERTIES.getDatabase());
                final MongoCollection<Document> collection = database.getCollection(
                        COLLECTION_NAME);
                // We use the $where operator to sleep for 5000 milliseconds. This operator
                // can only be used with find().
                final Document whereDoc =
                        new Document("$where", "function(){ return sleep(5000) || true;}");
                // Tag the operation with the query id so cancel can find it server-side.
                final FindIterable<Document> iterable = collection.find(whereDoc)
                        .comment(getQueryId());
                final MongoCursor<Document> iterator = iterable.iterator();
                final JdbcColumnMetaData column =
                        JdbcColumnMetaData.builder().columnLabel("EXPR$0").ordinal(0).build();
                return new DocumentDbResultSet(
                        statement, iterator, ImmutableList.of(column), ImmutableList.of("EXPR$0"));
            }
        }
    }
}
4,504
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/DocumentDbStatementStringTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc;

import org.bson.BsonDocument;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironment;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Tests SQL string functions (SUBSTRING, CHAR_LENGTH, POSITION, UPPER, LOWER,
 * CONCAT/||, LEFT, RIGHT) executed through a {@link DocumentDbStatement}
 * against each available test environment. Each test seeds a fresh collection
 * including null, missing, and empty-string cases to pin the driver's
 * null-propagation semantics.
 */
public class DocumentDbStatementStringTest extends DocumentDbStatementTest {

    /**
     * Verifies SUBSTRING with both the 3-argument and 2-argument forms,
     * including empty, null, and missing field values (the latter two render
     * as empty string for SUBSTRING).
     *
     * @param testEnvironment the environment to run against.
     * @throws SQLException on query failure.
     */
    @DisplayName("Test that queries selecting a substring work with 2 and 3 arguments.")
    @ParameterizedTest(name = "testQuerySubstring - [{index}] - {arguments}")
    @MethodSource({"getTestEnvironments"})
    void testQuerySubstring(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
        setTestEnvironment(testEnvironment);
        final String tableName = "testSelectQuerySubstring";
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101,\n" +
                "\"field\": \"abcdefg\"}");
        final BsonDocument doc2 = BsonDocument.parse("{\"_id\": 102,\n" +
                "\"field\": \"uvwxyz\"}");
        final BsonDocument doc3 = BsonDocument.parse("{\"_id\": 103,\n" +
                "\"field\": \"\"}");
        final BsonDocument doc4 = BsonDocument.parse("{\"_id\": 104, \n" +
                "\"field\": null}");
        final BsonDocument doc5 = BsonDocument.parse("{\"_id\": 105}");
        final BsonDocument doc6 = BsonDocument.parse("{\"_id\": 106,\n" +
                "\"field\": \"ab\"}");
        insertBsonDocuments(tableName, new BsonDocument[]{doc1, doc2, doc3, doc4, doc5, doc6});
        try (Connection connection = getConnection()) {
            final Statement statement = getDocumentDbStatement(connection);
            // Test SUBSTRING(%1, %2, %3) format.
            final ResultSet resultSet1 = statement.executeQuery(
                    String.format("SELECT SUBSTRING(\"field\", 1, 3) FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet1);
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals("abc", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals("uvw", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals("", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            // Null and missing fields both yield "" rather than SQL NULL here.
            Assertions.assertEquals("", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals("", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            // Shorter than the requested length: returns what is available.
            Assertions.assertEquals("ab", resultSet1.getString(1));
            Assertions.assertFalse(resultSet1.next());
            // Test SUBSTRING(%1, %2) format.
            final ResultSet resultSet2 = statement.executeQuery(
                    String.format("SELECT SUBSTRING(\"field\", 1) FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet2);
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertEquals("abcdefg", resultSet2.getString(1));
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertEquals("uvwxyz", resultSet2.getString(1));
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertEquals("", resultSet2.getString(1));
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertEquals("", resultSet2.getString(1));
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertEquals("", resultSet2.getString(1));
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertEquals("ab", resultSet2.getString(1));
            Assertions.assertFalse(resultSet2.next());
        }
    }

    /**
     * Verifies CHAR_LENGTH: counts characters (not bytes — multi-byte CJK text
     * counts per character), returns 0 for empty string, and SQL NULL for
     * null/missing fields.
     *
     * @param testEnvironment the environment to run against.
     * @throws SQLException on query failure.
     */
    @DisplayName("Test queries calling CHAR_LENGTH().")
    @ParameterizedTest(name = "testQueryCharLength - [{index}] - {arguments}")
    @MethodSource({"getTestEnvironments"})
    void testQueryCharLength(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
        setTestEnvironment(testEnvironment);
        // NOTE(review): table name has a typo ("tes" instead of "test"); harmless
        // since it is only an identifier, but inconsistent with sibling tests.
        final String tableName = "tesQueryCharLength";
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101,\n" +
                "\"field\": \"abcdefg\"}"); // 7
        final BsonDocument doc2 = BsonDocument.parse("{\"_id\": 102,\n" +
                "\"field\": \"寿司\"}"); // 2
        final BsonDocument doc3 = BsonDocument.parse("{\"_id\": 103,\n" +
                "\"field\": \"\"}"); // 0
        final BsonDocument doc4 = BsonDocument.parse("{\"_id\": 104, \n" +
                "\"field\": null}"); // null
        final BsonDocument doc5 = BsonDocument.parse("{\"_id\": 105}"); // null
        insertBsonDocuments(tableName, new BsonDocument[]{doc1, doc2, doc3, doc4, doc5});
        try (Connection connection = getConnection()) {
            final Statement statement = getDocumentDbStatement(connection);
            final ResultSet resultSet = statement.executeQuery(
                    String.format("SELECT CHAR_LENGTH(\"field\") FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet);
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals(7, resultSet.getInt(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals(2, resultSet.getInt(1));
            Assertions.assertTrue(resultSet.next());
            // Empty string: a real 0, not a NULL read as 0.
            Assertions.assertEquals(0, resultSet.getInt(1));
            Assertions.assertFalse(resultSet.wasNull());
            Assertions.assertTrue(resultSet.next());
            Assertions.assertNull(resultSet.getObject(1));
            Assertions.assertTrue(resultSet.wasNull());
            Assertions.assertTrue(resultSet.next());
            Assertions.assertNull(resultSet.getObject(1));
            Assertions.assertTrue(resultSet.wasNull());
            Assertions.assertFalse(resultSet.next());
        }
    }

    /**
     * Verifies POSITION in its 2- and 3-argument forms: 1-based index of the
     * substring (respecting the FROM start offset), 0 when not found or when
     * the start offset is negative, and SQL NULL when either string is null.
     *
     * @param testEnvironment the environment to run against.
     * @throws SQLException on query failure.
     */
    @DisplayName("Test queries calling POSITION with 2 and 3 arguments.")
    @ParameterizedTest(name = "testQueryPosition - [{index}] - {arguments}")
    @MethodSource({"getTestEnvironments"})
    void testQueryPosition(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
        setTestEnvironment(testEnvironment);
        final String tableName = "testQueryPosition";
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101,\n" +
                "\"field\": \"BanaNa\"}"); // Contains "na" string in 2 places
        final BsonDocument doc2 = BsonDocument.parse("{\"_id\": 102,\n" +
                "\"field\": \"Apple\"}"); // Does not contain "na"
        final BsonDocument doc3 = BsonDocument.parse("{\"_id\": 103,\n" +
                "\"field\": \"\"}"); // Empty string - does not contain "na"
        final BsonDocument doc4 = BsonDocument.parse("{\"_id\": 104, \n" +
                "\"field\": null}"); // Null string
        final BsonDocument doc5 = BsonDocument.parse("{\"_id\": 105}"); // Missing string
        insertBsonDocuments(tableName, new BsonDocument[]{doc1, doc2, doc3, doc4, doc5});
        try (Connection connection = getConnection()) {
            final Statement statement = getDocumentDbStatement(connection);
            // Test POSITION(%1 IN %2 FROM %3) format with non-null search substring.
            final ResultSet resultSet1 = statement.executeQuery(
                    String.format("SELECT POSITION('na' IN \"field\" FROM 4) FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet1);
            Assertions.assertTrue(resultSet1.next());
            // Starting at position 4 skips the first "na" and finds the second.
            Assertions.assertEquals(5, resultSet1.getInt(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals(0, resultSet1.getInt(1));
            Assertions.assertFalse(resultSet1.wasNull());
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals(0, resultSet1.getInt(1));
            Assertions.assertFalse(resultSet1.wasNull());
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertNull(resultSet1.getObject(1));
            Assertions.assertTrue(resultSet1.wasNull());
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertNull(resultSet1.getObject(1));
            Assertions.assertTrue(resultSet1.wasNull());
            Assertions.assertFalse(resultSet1.next());
            // Test POSITION(%1 IN %2) format with non-null search substring.
            final ResultSet resultSet2 = statement.executeQuery(
                    String.format("SELECT POSITION('na' IN \"field\") FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet2);
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertEquals(3, resultSet2.getInt(1));
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertEquals(0, resultSet2.getInt(1));
            Assertions.assertFalse(resultSet2.wasNull());
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertEquals(0, resultSet2.getInt(1));
            Assertions.assertFalse(resultSet2.wasNull());
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertNull(resultSet2.getObject(1));
            Assertions.assertTrue(resultSet2.wasNull());
            Assertions.assertTrue(resultSet2.next());
            Assertions.assertNull(resultSet2.getObject(1));
            Assertions.assertTrue(resultSet2.wasNull());
            Assertions.assertFalse(resultSet2.next());
            // Test POSITION(%1 IN %2 FROM %3) format with negative start index.
            // Returns 0 unless strings are null in which case returns null.
            final ResultSet resultSet3 = statement.executeQuery(
                    String.format("SELECT POSITION('na' IN \"field\" FROM -4) FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet3);
            Assertions.assertTrue(resultSet3.next());
            Assertions.assertEquals(0, resultSet3.getInt(1));
            Assertions.assertFalse(resultSet3.wasNull());
            Assertions.assertTrue(resultSet3.next());
            Assertions.assertEquals(0, resultSet3.getInt(1));
            Assertions.assertFalse(resultSet3.wasNull());
            Assertions.assertTrue(resultSet3.next());
            Assertions.assertEquals(0, resultSet3.getInt(1));
            Assertions.assertFalse(resultSet3.wasNull());
            Assertions.assertTrue(resultSet3.next());
            Assertions.assertNull(resultSet3.getObject(1));
            Assertions.assertTrue(resultSet3.wasNull());
            Assertions.assertTrue(resultSet3.next());
            Assertions.assertNull(resultSet3.getObject(1));
            Assertions.assertTrue(resultSet3.wasNull());
            Assertions.assertFalse(resultSet3.next());
            // Test POSITION(%1 IN %2) format with null search substring. Always returns null.
            final ResultSet resultSet4 = statement.executeQuery(
                    String.format("SELECT POSITION(NULL IN \"field\") FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet4);
            while (resultSet4.next()) {
                Assertions.assertNull(resultSet4.getObject(1));
                Assertions.assertTrue(resultSet4.wasNull());
            }
        }
    }

    /**
     * Verifies UPPER: ASCII is upper-cased, characters with no case mapping
     * (CJK) pass through, and null/missing fields yield SQL NULL.
     *
     * @param testEnvironment the environment to run against.
     * @throws SQLException on query failure.
     */
    @DisplayName("Test queries using UPPER().")
    @ParameterizedTest(name = "testQueryUpper - [{index}] - {arguments}")
    @MethodSource({"getTestEnvironments"})
    void testQueryUpper(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
        setTestEnvironment(testEnvironment);
        final String tableName = "testQueryUpper";
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101,\n" +
                "\"field\": \"Hello World!\"}");
        final BsonDocument doc2 = BsonDocument.parse("{\"_id\": 102,\n" +
                "\"field\": \"寿司\"}");
        final BsonDocument doc3 = BsonDocument.parse("{\"_id\": 103,\n" +
                "\"field\": \"\"}");
        final BsonDocument doc4 = BsonDocument.parse("{\"_id\": 104, \n" +
                "\"field\": null}");
        final BsonDocument doc5 = BsonDocument.parse("{\"_id\": 105}");
        insertBsonDocuments(tableName, new BsonDocument[]{doc1, doc2, doc3, doc4, doc5});
        try (Connection connection = getConnection()) {
            final Statement statement = getDocumentDbStatement(connection);
            final ResultSet resultSet = statement.executeQuery(
                    String.format("SELECT UPPER(\"field\") FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet);
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("HELLO WORLD!", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("寿司", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertNull(resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertNull(resultSet.getString(1));
            Assertions.assertFalse(resultSet.next());
        }
    }

    /**
     * Verifies LOWER: mirror of {@link #testQueryUpper}.
     *
     * @param testEnvironment the environment to run against.
     * @throws SQLException on query failure.
     */
    @DisplayName("Test queries using LOWER().")
    @ParameterizedTest(name = "testQueryLower - [{index}] - {arguments}")
    @MethodSource({"getTestEnvironments"})
    void testQueryLower(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
        setTestEnvironment(testEnvironment);
        final String tableName = "testQueryLower";
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101,\n" +
                "\"field\": \"Hello World!\"}");
        final BsonDocument doc2 = BsonDocument.parse("{\"_id\": 102,\n" +
                "\"field\": \"寿司\"}");
        final BsonDocument doc3 = BsonDocument.parse("{\"_id\": 103,\n" +
                "\"field\": \"\"}");
        final BsonDocument doc4 = BsonDocument.parse("{\"_id\": 104, \n" +
                "\"field\": null}");
        final BsonDocument doc5 = BsonDocument.parse("{\"_id\": 105}");
        insertBsonDocuments(tableName, new BsonDocument[]{doc1, doc2, doc3, doc4, doc5});
        try (Connection connection = getConnection()) {
            final Statement statement = getDocumentDbStatement(connection);
            final ResultSet resultSet = statement.executeQuery(
                    String.format("SELECT LOWER(\"field\") FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet);
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("hello world!", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("寿司", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertNull(resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertNull(resultSet.getString(1));
            Assertions.assertFalse(resultSet.next());
        }
    }

    /**
     * Verifies the SQL-standard || concatenation operator: any null operand
     * makes the whole result SQL NULL.
     *
     * @param testEnvironment the environment to run against.
     * @throws SQLException on query failure.
     */
    @DisplayName("Test queries using the || operator.")
    @ParameterizedTest(name = "testQueryConcatOperator - [{index}] - {arguments}")
    @MethodSource({"getTestEnvironments"})
    void testConcatOperator(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
        setTestEnvironment(testEnvironment);
        final String tableName = "testConcatOperator";
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101,\n" +
                "\"field\": \"Hello\"}");
        final BsonDocument doc2 = BsonDocument.parse("{\"_id\": 102,\n" +
                "\"field\": \"寿司\"}");
        final BsonDocument doc3 = BsonDocument.parse("{\"_id\": 103,\n" +
                "\"field\": \"\"}");
        final BsonDocument doc4 = BsonDocument.parse("{\"_id\": 104, \n" +
                "\"field\": null}");
        final BsonDocument doc5 = BsonDocument.parse("{\"_id\": 105}");
        insertBsonDocuments(tableName, new BsonDocument[]{doc1, doc2, doc3, doc4, doc5});
        try (Connection connection = getConnection()) {
            final Statement statement = getDocumentDbStatement(connection);
            final ResultSet resultSet = statement.executeQuery(
                    String.format("SELECT 'I want to say: ' || \"field\" || '!' FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet);
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("I want to say: Hello!", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("I want to say: 寿司!", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("I want to say: !", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            // || is null-propagating: null/missing operand makes the result null.
            Assertions.assertNull(resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertNull(resultSet.getString(1));
            Assertions.assertFalse(resultSet.next());
        }
    }

    /**
     * Verifies the CONCAT() function. Unlike ||, CONCAT treats null/missing
     * operands as empty strings rather than propagating SQL NULL.
     *
     * @param testEnvironment the environment to run against.
     * @throws SQLException on query failure.
     */
    @DisplayName("Test queries using CONCAT().")
    @ParameterizedTest(name = "testQueryConcatFunction - [{index}] - {arguments}")
    @MethodSource({"getTestEnvironments"})
    void testConcatFunction(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
        setTestEnvironment(testEnvironment);
        final String tableName = "testConcatFunction";
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101,\n" +
                "\"field\": \"Hello\"}");
        final BsonDocument doc2 = BsonDocument.parse("{\"_id\": 102,\n" +
                "\"field\": \"寿司\"}");
        final BsonDocument doc3 = BsonDocument.parse("{\"_id\": 103,\n" +
                "\"field\": \"\"}");
        final BsonDocument doc4 = BsonDocument.parse("{\"_id\": 104, \n" +
                "\"field\": null}");
        final BsonDocument doc5 = BsonDocument.parse("{\"_id\": 105}");
        insertBsonDocuments(tableName, new BsonDocument[]{doc1, doc2, doc3, doc4, doc5});
        try (Connection connection = getConnection()) {
            final Statement statement = getDocumentDbStatement(connection);
            final ResultSet resultSet = statement.executeQuery(
                    String.format("SELECT CONCAT('I want to say: ', \"field\", '!') FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet);
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("I want to say: Hello!", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("I want to say: 寿司!", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("I want to say: !", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("I want to say: !", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("I want to say: !", resultSet.getString(1));
            Assertions.assertFalse(resultSet.next());
        }
    }

    /**
     * Verifies a query combining UPPER, CONCAT, POSITION, and CHAR_LENGTH in
     * both SELECT and WHERE clauses.
     *
     * @param testEnvironment the environment to run against.
     * @throws SQLException on query failure.
     */
    @DisplayName("Test query using combination of different string functions.")
    @ParameterizedTest(name = "testQueryCombinedStringFunctions - [{index}] - {arguments}")
    @MethodSource({"getTestEnvironments"})
    void testCombinedStringFunctions(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
        setTestEnvironment(testEnvironment);
        final String tableName = "testCombinedStringFunctions";
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101,\n" +
                "\"field\": \"Hello\"}");
        final BsonDocument doc2 = BsonDocument.parse("{\"_id\": 102,\n" +
                "\"field\": \"寿司\"}");
        final BsonDocument doc3 = BsonDocument.parse("{\"_id\": 103,\n" +
                "\"field\": \"\"}");
        final BsonDocument doc4 = BsonDocument.parse("{\"_id\": 104, \n" +
                "\"field\": null}");
        final BsonDocument doc5 = BsonDocument.parse("{\"_id\": 105}");
        insertBsonDocuments(tableName, new BsonDocument[]{doc1, doc2, doc3, doc4, doc5});
        try (Connection connection = getConnection()) {
            final Statement statement = getDocumentDbStatement(connection);
            // NOTE(review): the concatenated SQL has no space between the quoted
            // table name and WHERE; presumably tolerated because the closing
            // quote ends the identifier token — TODO confirm and add a space.
            final ResultSet resultSet = statement.executeQuery(
                    String.format("SELECT UPPER(CONCAT('I want to say: ', \"field\", '!')) " +
                                    "FROM \"%s\".\"%s\"" +
                                    "WHERE POSITION('o' IN \"field\") = CHAR_LENGTH(\"field\")",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet);
            // Returns 2 rows: the 1st row with 'Hello' (a proper match)
            // and the 3rd row with empty string since its length is 0 and none found is also 0.
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("I WANT TO SAY: HELLO!", resultSet.getString(1));
            Assertions.assertTrue(resultSet.next());
            Assertions.assertEquals("I WANT TO SAY: !", resultSet.getString(1));
            Assertions.assertFalse(resultSet.next());
        }
    }

    /**
     * Verifies LEFT(): leading N characters (whole string when shorter),
     * SQL NULL for null/missing fields, and SQL NULL for a negative length.
     *
     * @param testEnvironment the environment to run against.
     * @throws SQLException on query failure.
     */
    @DisplayName("Test queries using LEFT().")
    @ParameterizedTest(name = "testQueryLeft - [{index}] - {arguments}")
    @MethodSource({"getTestEnvironments"})
    void testQueryLeft(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
        setTestEnvironment(testEnvironment);
        final String tableName = "testQueryLeft";
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101,\n" +
                "\"field\": \"Hello World!\"}");
        final BsonDocument doc2 = BsonDocument.parse("{\"_id\": 102,\n" +
                "\"field\": \"寿司\"}");
        final BsonDocument doc3 = BsonDocument.parse("{\"_id\": 103,\n" +
                "\"field\": \"\"}");
        final BsonDocument doc4 = BsonDocument.parse("{\"_id\": 104, \n" +
                "\"field\": null}");
        final BsonDocument doc5 = BsonDocument.parse("{\"_id\": 105}");
        insertBsonDocuments(tableName, new BsonDocument[]{doc1, doc2, doc3, doc4, doc5});
        try (Connection connection = getConnection()) {
            final Statement statement = getDocumentDbStatement(connection);
            final ResultSet resultSet1 = statement.executeQuery(
                    String.format("SELECT LEFT(\"field\", 5) FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet1);
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals("Hello", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals("寿司", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals("", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertNull(resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertNull(resultSet1.getString(1));
            Assertions.assertFalse(resultSet1.next());
            // A negative length always results in null.
            final ResultSet resultSet2 = statement.executeQuery(
                    String.format("SELECT LEFT(\"field\", -2) FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet2);
            while (resultSet2.next()) {
                Assertions.assertNull(resultSet2.getObject(1));
                Assertions.assertTrue(resultSet2.wasNull());
            }
        }
    }

    /**
     * Verifies RIGHT(): trailing N characters (whole string when shorter),
     * SQL NULL for null/missing fields, and SQL NULL for a negative length.
     *
     * @param testEnvironment the environment to run against.
     * @throws SQLException on query failure.
     */
    @DisplayName("Test queries using RIGHT().")
    @ParameterizedTest(name = "testQueryRight - [{index}] - {arguments}")
    @MethodSource({"getTestEnvironments"})
    void testQueryRight(final DocumentDbTestEnvironment testEnvironment) throws SQLException {
        setTestEnvironment(testEnvironment);
        final String tableName = "testQueryRight";
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101,\n" +
                "\"field\": \"Hello World!\"}");
        final BsonDocument doc2 = BsonDocument.parse("{\"_id\": 102,\n" +
                "\"field\": \"寿司\"}");
        final BsonDocument doc3 = BsonDocument.parse("{\"_id\": 103,\n" +
                "\"field\": \"\"}");
        final BsonDocument doc4 = BsonDocument.parse("{\"_id\": 104, \n" +
                "\"field\": null}");
        final BsonDocument doc5 = BsonDocument.parse("{\"_id\": 105}");
        insertBsonDocuments(tableName, new BsonDocument[]{doc1, doc2, doc3, doc4, doc5});
        try (Connection connection = getConnection()) {
            final Statement statement = getDocumentDbStatement(connection);
            final ResultSet resultSet1 = statement.executeQuery(
                    String.format("SELECT RIGHT(\"field\", 5) FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet1);
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals("orld!", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals("寿司", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertEquals("", resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertNull(resultSet1.getString(1));
            Assertions.assertTrue(resultSet1.next());
            Assertions.assertNull(resultSet1.getString(1));
            Assertions.assertFalse(resultSet1.next());
            // A negative length always results in null.
            final ResultSet resultSet2 = statement.executeQuery(
                    String.format("SELECT RIGHT(\"field\", -2) FROM \"%s\".\"%s\"",
                            getDatabaseName(), tableName));
            Assertions.assertNotNull(resultSet2);
            while (resultSet2.next()) {
                Assertions.assertNull(resultSet2.getObject(1));
                Assertions.assertTrue(resultSet2.wasNull());
            }
        }
    }
}
4,505
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/DocumentDbPooledConnectionTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import java.sql.Connection; public class DocumentDbPooledConnectionTest { @Mock private Connection mockConnection; @BeforeEach void init() { MockitoAnnotations.openMocks(this); } @Test @DisplayName("Tests that a non-null instance of DocumentDbPooledConnection is returned from constructor.") void testDocumentDbPooledConnection() { final DocumentDbPooledConnection pooledConnection = new DocumentDbPooledConnection(mockConnection); Assertions.assertNotNull(pooledConnection); } }
4,506
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/DocumentDbConnectionTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleTest; import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironment; import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironmentFactory; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchema; import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaReader; import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter; import java.security.SecureRandom; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.List; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import 
java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; @ExtendWith(DocumentDbFlapDoodleExtension.class) public class DocumentDbConnectionTest extends DocumentDbFlapDoodleTest { private static final String HOSTNAME = "localhost"; private static final String USERNAME = "user"; private static final String PASSWORD = "password"; private static final String DATABASE = "testDb"; private static final String COLLECTION_NAME = "COLLECTION"; private static final DocumentDbConnectionProperties VALID_CONNECTION_PROPERTIES = new DocumentDbConnectionProperties(); private static Connection basicConnection; private static final String DOC_DB_USER_PROPERTY = "DOC_DB_USER"; private static final String DOC_DB_HOST_PROPERTY = "DOC_DB_HOST"; private static final String DOC_DB_PRIV_KEY_FILE_PROPERTY = "DOC_DB_PRIV_KEY_FILE"; /** Initializes the test class. */ @BeforeAll public static void initialize() throws SQLException { VALID_CONNECTION_PROPERTIES.setUser(USERNAME); VALID_CONNECTION_PROPERTIES.setPassword(PASSWORD); VALID_CONNECTION_PROPERTIES.setDatabase(DATABASE); VALID_CONNECTION_PROPERTIES.setTlsEnabled("false"); VALID_CONNECTION_PROPERTIES.setHostname(HOSTNAME + ":" + getMongoPort()); // Add 1 valid user so we can successfully authenticate. createUser(DATABASE, USERNAME, PASSWORD); prepareSimpleConsistentData(DATABASE, COLLECTION_NAME, 5, USERNAME, PASSWORD); final String connectionString = String.format( "jdbc:documentdb://%s:%s@%s:%s/%s?tls=false", USERNAME, PASSWORD, HOSTNAME, getMongoPort(), DATABASE); basicConnection = DriverManager.getConnection(connectionString); // Ensure we have the initial schema. 
Assertions.assertNotNull(basicConnection.getMetaData()); } @AfterAll static void afterAll() throws Exception { try (DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter( VALID_CONNECTION_PROPERTIES, null)) { schemaWriter.remove(DocumentDbSchema.DEFAULT_SCHEMA_NAME); } basicConnection.close(); } /** * Tests isValid() when connected to a local MongoDB instance. * * @throws SQLException if an error occurs instantiating a Connection. */ @Test void testIsValidWhenConnectionIsValid() throws SQLException { try (DocumentDbConnection connection = (DocumentDbConnection) DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, VALID_CONNECTION_PROPERTIES)) { // NOTE: Observed approximate 10 … 11 seconds delay before first heartbeat is returned. final int timeoutSeconds = 15; Assertions.assertTrue(connection.isValid(timeoutSeconds)); } } /** * Tests isValid() when connected to a local MongoDB instance but timeout is negative. * * @throws SQLException if an error occurs instantiating a Connection. */ @Test void testIsValidWhenTimeoutIsNegative() throws SQLException { try (DocumentDbConnection connection = (DocumentDbConnection) DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, VALID_CONNECTION_PROPERTIES)) { Assertions.assertThrows(SQLException.class, () -> connection.isValid(-1)); } } /** * Tests close() when connected to a local mongoDB instance and Connection is not yet closed. * * @throws SQLException if an error occurs instantiating a Connection. */ @Test void testClose() throws SQLException { final DocumentDbConnection connection = (DocumentDbConnection) DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, VALID_CONNECTION_PROPERTIES); Assertions.assertFalse(connection.isClosed()); connection.close(); Assertions.assertTrue(connection.isClosed()); } /** * Tests constructor when passed valid options. * * @throws SQLException if an error occurs instantiating a Connection. 
*/ @Test void testConnectionWithValidOptions() throws SQLException { final DocumentDbConnectionProperties properties = new DocumentDbConnectionProperties(VALID_CONNECTION_PROPERTIES); properties.setApplicationName("test"); properties.setLoginTimeout("10"); properties.setRetryReadsEnabled("false"); properties.setReadPreference(DocumentDbReadPreference.PRIMARY.getName()); try (DocumentDbConnection connection = (DocumentDbConnection) DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties)) { Assertions.assertNotNull(connection); } } /** * Tests constructor when passed invalid options. Invalid options are ignored. * * @throws SQLException if an error occurs instantiating a Connection. */ @Test void testConnectionWithInvalidOptions() throws SQLException { final DocumentDbConnectionProperties properties = new DocumentDbConnectionProperties(VALID_CONNECTION_PROPERTIES); properties.setReadPreference("invalidReadPreference"); properties.setTlsEnabled("invalidBoolean"); try (DocumentDbConnection connection = (DocumentDbConnection) DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties)) { Assertions.assertNotNull(connection); } } /** Tests constructor when passed an invalid database name. */ @Test void testConnectionWithInvalidDatabase() { final DocumentDbConnectionProperties properties = new DocumentDbConnectionProperties(VALID_CONNECTION_PROPERTIES); properties.setDatabase(" "); Assertions.assertThrows(SQLException.class, () -> DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties)); } /** Tests constructor when passed invalid credentials. 
*/ @Test void testConnectionWithInvalidCredentials() { final DocumentDbConnectionProperties properties = new DocumentDbConnectionProperties(VALID_CONNECTION_PROPERTIES); properties.setUser("invalidUser"); Assertions.assertTrue( Assertions.assertThrows(SQLException.class, () -> DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties)) .getMessage() .contains("Authorization failed for user")); } /** Tests constructor when authenticating with non-default database. */ @Test void testConnectionWithAuthenticationDatabase() throws SQLException { // Create another user for a different database on that same database (instead of admin) final String otherUser = "other"; final String otherDatabase = "other"; final String otherUserPassword = "password"; createUser(otherDatabase, otherDatabase, otherUser, otherUserPassword); // Attempt to authenticate with this user with default authentication database admin. final DocumentDbConnectionProperties properties = new DocumentDbConnectionProperties(VALID_CONNECTION_PROPERTIES); properties.setUser(otherUser); properties.setPassword(otherUserPassword); Assertions.assertTrue( Assertions.assertThrows(SQLException.class, () -> DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties)) .getMessage() .contains("Authorization failed for user")); // Attempt to authenticate with this user after setting correct authentication database. 
properties.setDefaultAuthenticationDatabase(otherDatabase); try (DocumentDbConnection connection = (DocumentDbConnection) DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties)) { Assertions.assertNotNull(connection); } } /** * Test for connection.getSchema() and getCatalog */ @Test @DisplayName("Tests that catalog is null, and schema is equal to the database name.") void testGetMetadataSchema() throws SQLException { final String catalog = basicConnection.getCatalog(); Assertions.assertNull(catalog); final String schema = basicConnection.getSchema(); Assertions.assertEquals(DATABASE, schema); } /** * Test for connection.getMetadata() for basic properties. */ @Test @DisplayName("Tests simple metadata of a database connection.") void testGetMetadata() throws SQLException { final DocumentDbDatabaseMetaData metadata = (DocumentDbDatabaseMetaData) basicConnection.getMetaData(); Assertions.assertEquals("", metadata.getSQLKeywords()); Assertions.assertNotNull(metadata); final String connectionString = String.format( "jdbc:documentdb://%s@%s:%s/%s?tls=false", USERNAME, HOSTNAME, getMongoPort(), DATABASE); Assertions.assertEquals(connectionString, metadata.getURL()); Assertions.assertEquals(USERNAME, metadata.getUserName()); final ResultSet procedures = metadata.getProcedures(null, null, null); Assertions.assertFalse(procedures.next()); final ResultSet catalogs = metadata.getCatalogs(); // No records indicate we don't support/use catalogs. Assertions.assertFalse(catalogs.next()); final ResultSet columnPrivileges = metadata.getColumnPrivileges(null, null, null, null); Assertions.assertFalse(columnPrivileges.next()); } /** * Tests getting primary keys from database. 
*/ @Test @DisplayName("Tests that metadata can return primary keys.") void testGetPrimaryKeys() throws SQLException { final ResultSet primaryKeys = basicConnection.getMetaData() .getPrimaryKeys(null, null, COLLECTION_NAME); Assertions.assertTrue(primaryKeys.next()); Assertions.assertEquals("COLLECTION__id", primaryKeys.getString(4)); Assertions.assertEquals(1, primaryKeys.getShort(5)); } /** * Tests metadata for tables of database. */ @Test @DisplayName("Tests the database metadata contains the expected tables.") void testGetMetadataTables() throws SQLException { final ResultSet tables = basicConnection.getMetaData().getTables(null, null, COLLECTION_NAME, null); Assertions.assertTrue(tables.next()); // Test by column index Assertions.assertNull(tables.getString(1)); Assertions.assertEquals("testDb", tables.getString(2)); Assertions.assertEquals("COLLECTION", tables.getString(3)); Assertions.assertEquals("TABLE", tables.getString(4)); // Test by column label, case-insensitive Assertions.assertNull(tables.getString("TABLE_cat")); Assertions.assertEquals("testDb", tables.getString("TABLE_SCHEM")); Assertions.assertEquals("COLLECTION", tables.getString("table_name")); Assertions.assertEquals("TABLE", tables.getString("table_TYPE")); Assertions.assertFalse(tables.next()); } @Test @DisplayName("Tests that a statement can be created given valid type and concurrency") void testStatement() throws SQLException { try (Statement statement = basicConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) { Assertions.assertNotNull(statement); } try (Statement ignored = basicConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE)) { Assertions.fail("Exception should be thrown."); } catch (Exception e) { Assertions.assertEquals(SqlError.lookup(SqlError.UNSUPPORTED_RESULT_SET_TYPE),e.getMessage()); } try (Statement ignored = basicConnection.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) { 
Assertions.fail("Exception should be thrown."); } catch (Exception e) { Assertions.assertEquals(SqlError.lookup(SqlError.UNSUPPORTED_RESULT_SET_TYPE),e.getMessage()); } } @Test @DisplayName("Tests that a prepared statement can be created given valid type and concurrency") void testPreparedStatement() throws SQLException { final String sql = "SELECT * FROM " + COLLECTION_NAME; try (PreparedStatement statement = basicConnection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) { Assertions.assertNotNull(statement); } try (PreparedStatement ignored = basicConnection.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE)) { Assertions.fail("Exception should be thrown."); } catch (Exception e) { Assertions.assertEquals(SqlError.lookup(SqlError.UNSUPPORTED_RESULT_SET_TYPE), e.getMessage()); } try (PreparedStatement ignored = basicConnection.prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) { Assertions.fail("Exception should be thrown."); } catch (Exception e) { Assertions.assertEquals(SqlError.lookup(SqlError.UNSUPPORTED_RESULT_SET_TYPE), e.getMessage()); } } /** * Tests metadata for table types. */ @Test @DisplayName("Tests that the table types table contains table and view.") void testMetadataGetTableTypes() throws SQLException { final ResultSet tableTypes = basicConnection.getMetaData().getTableTypes(); Assertions.assertEquals("TABLE_TYPE", tableTypes.getMetaData().getColumnName(1)); Assertions.assertTrue(tableTypes.next()); Assertions.assertEquals("TABLE", tableTypes.getString(1)); Assertions.assertFalse(tableTypes.next()); } @ParameterizedTest(name = "testSshTunnelOptions - [{index}] - {arguments}") @DisplayName("Tests SSH tunnel options") @MethodSource("getDocumentDb40SshTunnelEnvironmentSourceOrNull") void testSshTunnelOptions(final DocumentDbTestEnvironment environment) throws SQLException { // NOTE: a "null" environment means it isn't configured to run. So bypass. 
if (environment == null) { return; } final DocumentDbConnectionProperties properties = getInternalSSHTunnelProperties(environment); try (Connection connection = DriverManager.getConnection("jdbc:documentdb:", properties)) { Assertions.assertTrue(connection instanceof DocumentDbConnection); Assertions.assertTrue(connection.isValid(10)); } } @ParameterizedTest(name = "testSshTunnelInvalidOptions - [{index}] - {arguments}") @DisplayName("Tests SSH tunnel with invalid options") @MethodSource("getDocumentDb40SshTunnelEnvironmentSourceOrNull") void testSshTunnelInvalidOptions(final DocumentDbTestEnvironment environment) throws SQLException { // NOTE: a "null" environment means it isn't configured to run. So bypass. if (environment == null) { return; } final DocumentDbConnectionProperties properties = getInternalSSHTunnelProperties(environment); // Test incorrect private key file path. properties.setSshPrivateKeyFile("~/certs/other.pem"); Assertions.assertEquals(SqlError.lookup(SqlError.SSH_PRIVATE_KEY_FILE_NOT_FOUND, "~/certs/other.pem"), Assertions.assertThrows(SQLException.class, () -> DriverManager.getConnection("jdbc:documentdb:", properties)).getMessage()); // Test incorrect known hosts path. properties.setSshPrivateKeyFile(System.getenv(DOC_DB_PRIV_KEY_FILE_PROPERTY)); properties.setSshStrictHostKeyChecking("true"); properties.setSshKnownHostsFile("~/.ssh/unknown_hosts"); Assertions.assertEquals(SqlError.lookup(SqlError.KNOWN_HOSTS_FILE_NOT_FOUND, "~/.ssh/unknown_hosts"), Assertions.assertThrows(SQLException.class, () -> DriverManager.getConnection("jdbc:documentdb:", properties)).getMessage()); // Test where TLSAllowInvalidHostnames is disabled - host will not match certificate. 
properties.setSshStrictHostKeyChecking("false"); properties.setTlsAllowInvalidHostnames("false"); Assertions.assertThrows(SQLException.class, () -> DriverManager.getConnection("jdbc:documentdb:", properties)); } @Test() @DisplayName("Tests refreshing the schema from a connection.") void testRefreshSchema() throws Exception { final DocumentDbConnectionProperties properties = new DocumentDbConnectionProperties(VALID_CONNECTION_PROPERTIES); int expectedVersion = 1; try (DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader( VALID_CONNECTION_PROPERTIES, null)) { final DocumentDbSchema schema = schemaReader.read(DocumentDbSchema.DEFAULT_SCHEMA_NAME); Assertions.assertNotNull(schema); Assertions.assertEquals(expectedVersion, schema.getSchemaVersion()); } properties.setRefreshSchema("true"); try (DocumentDbConnection connection = (DocumentDbConnection) DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties)) { final int timeoutSeconds = 15; Assertions.assertTrue(connection.isValid(timeoutSeconds)); Assertions.assertNotNull(connection.getDatabaseMetadata()); } expectedVersion++; try (DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader( VALID_CONNECTION_PROPERTIES, null)) { final DocumentDbSchema schema = schemaReader.read(DocumentDbSchema.DEFAULT_SCHEMA_NAME); Assertions.assertNotNull(schema); Assertions.assertEquals(expectedVersion, schema.getSchemaVersion()); } properties.setRefreshSchema("true"); try (DocumentDbConnection connection = (DocumentDbConnection) DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties)) { final int timeoutSeconds = 15; Assertions.assertTrue(connection.isValid(timeoutSeconds)); Assertions.assertNotNull(connection.getDatabaseMetadata()); } expectedVersion++; try (DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader( VALID_CONNECTION_PROPERTIES, null)) { final DocumentDbSchema schema = 
schemaReader.read(DocumentDbSchema.DEFAULT_SCHEMA_NAME); Assertions.assertNotNull(schema); Assertions.assertEquals(expectedVersion, schema.getSchemaVersion()); } properties.setRefreshSchema("true"); try (DocumentDbConnection connection = (DocumentDbConnection) DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties)) { final int timeoutSeconds = 15; Assertions.assertTrue(connection.isValid(timeoutSeconds)); Assertions.assertNotNull(connection.getDatabaseMetadata()); } expectedVersion++; try (DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader( VALID_CONNECTION_PROPERTIES, null)) { final DocumentDbSchema schema = schemaReader.read(DocumentDbSchema.DEFAULT_SCHEMA_NAME); Assertions.assertNotNull(schema); Assertions.assertEquals(expectedVersion, schema.getSchemaVersion()); } properties.setRefreshSchema("false"); try (DocumentDbConnection connection = (DocumentDbConnection) DriverManager.getConnection( DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties)) { final int timeoutSeconds = 15; Assertions.assertTrue(connection.isValid(timeoutSeconds)); Assertions.assertNotNull(connection.getDatabaseMetadata()); } try (DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader( VALID_CONNECTION_PROPERTIES, null)) { final DocumentDbSchema schema = schemaReader.read(DocumentDbSchema.DEFAULT_SCHEMA_NAME); Assertions.assertNotNull(schema); Assertions.assertEquals(expectedVersion, schema.getSchemaVersion()); } // Always false unless value is non-null and case-insensitive equal to "true". 
properties.setRefreshSchema("notTrue"); Assertions.assertFalse(properties.getRefreshSchema()); } @ParameterizedTest(name = "testMultiProcessConnections - [{index}] - {arguments}") @MethodSource("getDocumentDb40SshTunnelEnvironmentSourceOrNull") void testMultiProcessConnections(final DocumentDbTestEnvironment environment) throws Exception { if (environment == null) { return; } environment.start(); final int numberOfConnections = 100; final List<Runner> runners = new ArrayList<>(); final List<Thread> threads = new ArrayList<>(); final DocumentDbConnectionProperties internalSSHTunnelProperties = getInternalSSHTunnelProperties(environment); for (int i = 0; i < numberOfConnections; i++) { final Runner runner = new Runner(internalSSHTunnelProperties); final Thread thread = new Thread(runner); runners.add(runner); threads.add(thread); } for (final Thread thread : threads) { thread.start(); } while (threads.size() > 0) { TimeUnit.MILLISECONDS.sleep(100); for (int i = threads.size() - 1; i >= 0; i--) { final Thread thread = threads.get(i); if (!thread.isAlive()) { thread.join(); threads.remove(i); } } } for (final Runner runner : runners) { final Queue<Exception> exceptions = runner.exceptions; Assertions.assertEquals(0, exceptions.size(), () -> exceptions.stream() .map(e -> e.getMessage()) .collect(Collectors.joining("; "))); } } private Stream<DocumentDbTestEnvironment> getDocumentDb40SshTunnelEnvironmentSourceOrNull() { if (DocumentDbTestEnvironmentFactory.getConfiguredEnvironments().stream() .anyMatch(e -> e == DocumentDbTestEnvironmentFactory .getDocumentDb40SshTunnelEnvironment())) { return DocumentDbTestEnvironmentFactory.getConfiguredEnvironments().stream() .filter(e -> e == DocumentDbTestEnvironmentFactory .getDocumentDb40SshTunnelEnvironment()); } else { return Stream.of((DocumentDbTestEnvironment) null); } } /** * Gets the connection properties to test an internal SSH tunnel. * * @param environment the test environment. 
* @return a {@link DocumentDbConnectionProperties} object. * @throws SQLException if any of the properties cannot be parsed correctly. */ public static DocumentDbConnectionProperties getInternalSSHTunnelProperties( final DocumentDbTestEnvironment environment) throws SQLException { final String docDbRemoteHost = System.getenv(DOC_DB_HOST_PROPERTY); final String docDbSshUserAndHost = System.getenv(DOC_DB_USER_PROPERTY); final String docDbPrivKeyFile = System.getenv(DOC_DB_PRIV_KEY_FILE_PROPERTY); final int userSeparatorIndex = docDbSshUserAndHost.indexOf('@'); final String sshUser = docDbSshUserAndHost.substring(0, userSeparatorIndex); final String sshHostname = docDbSshUserAndHost.substring(userSeparatorIndex + 1); final DocumentDbConnectionProperties properties = DocumentDbConnectionProperties .getPropertiesFromConnectionString(environment.getJdbcConnectionString()); properties.setHostname(docDbRemoteHost); properties.setSshUser(sshUser); properties.setSshHostname(sshHostname); properties.setSshPrivateKeyFile(docDbPrivKeyFile); properties.setSshStrictHostKeyChecking("false"); return properties; } private static class Runner implements Runnable { private static final SecureRandom RANDOM = new SecureRandom(); private final DocumentDbConnectionProperties properties; private final Queue<Exception> exceptions = new ConcurrentLinkedQueue<>(); Runner(final DocumentDbConnectionProperties properties) { this.properties = properties; } @Override public void run() { final int timeToWaitSECS = RANDOM.nextInt(5) + 1; final Instant timeoutTime = Instant.now().plus(timeToWaitSECS, ChronoUnit.SECONDS); DocumentDbConnection connection = null; try { connection = new DocumentDbConnection(properties); while (timeoutTime.isAfter(Instant.now())) { connection.isValid(1); TimeUnit.MILLISECONDS.sleep(100); } } catch (Exception e) { exceptions.add(e); } finally { if (connection != null) { try { connection.close(); Assertions.assertFalse(connection.isValid(1)); } catch (Exception e) { 
exceptions.add(e); } } } } Queue<Exception> getExceptions() { return exceptions; } } }
4,507
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/DocumentDbTestEnvironmentFactoryTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; import org.bson.BsonDocument; import org.bson.Document; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironment; import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironmentFactory; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchema; import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Properties; import java.util.stream.Stream; class DocumentDbTestEnvironmentFactoryTest { private DocumentDbTestEnvironment testEnvironment; @BeforeAll static void setup() throws Exception { // Start the test environments. 
for (DocumentDbTestEnvironment testEnvironment : DocumentDbTestEnvironmentFactory.getConfiguredEnvironments()) { testEnvironment.start(); } } @AfterEach void afterEach() throws Exception { final DocumentDbConnectionProperties properties = DocumentDbConnectionProperties .getPropertiesFromConnectionString(testEnvironment.getJdbcConnectionString()); try (DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, null)) { schemaWriter.remove(DocumentDbSchema.DEFAULT_SCHEMA_NAME); } } @AfterAll static void teardown() throws Exception { // Stop the test environments. for (DocumentDbTestEnvironment testEnvironment : DocumentDbTestEnvironmentFactory.getConfiguredEnvironments()) { testEnvironment.stop(); } } private static Stream<DocumentDbTestEnvironment> getTestEnvironments() { return DocumentDbTestEnvironmentFactory.getConfiguredEnvironments().stream(); } @DisplayName("Tests connectivity of the MongoClient returned from each test environment.") @ParameterizedTest(name = "testEnvironmentClientConnectivity - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testEnvironmentClientConnectivity(final DocumentDbTestEnvironment testEnvironment) throws SQLException { this.testEnvironment = testEnvironment; try (MongoClient client = testEnvironment.createMongoClient()) { final MongoDatabase database = client.getDatabase(testEnvironment.getDatabaseName()); final Document document = database.runCommand(new Document("ping", 1)); Assertions.assertEquals(1.0, document.getDouble("ok")); } } @DisplayName("Tests connection string from each test environment.") @ParameterizedTest(name = "testEnvironmentConnectionString - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testEnvironmentConnectionString(final DocumentDbTestEnvironment testEnvironment) throws SQLException { this.testEnvironment = testEnvironment; final String connectionString = testEnvironment.getJdbcConnectionString(); try (Connection connection = 
DriverManager.getConnection(connectionString, new Properties())) { Assertions.assertTrue(connection instanceof DocumentDbConnection); final DatabaseMetaData metaData = connection.getMetaData(); Assertions.assertTrue(metaData instanceof DocumentDbDatabaseMetaData); final ResultSet schemas = metaData.getSchemas(); Assertions.assertNotNull(schemas); Assertions.assertTrue(schemas.next()); } } @DisplayName("Tests preparing simple consistent data from each test environment.") @ParameterizedTest(name = "testPrepareSimpleConsistentData - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testPrepareSimpleConsistentData(final DocumentDbTestEnvironment testEnvironment) throws SQLException { this.testEnvironment = testEnvironment; final String collectionName; final int recordCount = 10; try (MongoClient client = testEnvironment.createMongoClient()) { final MongoDatabase database = client.getDatabase(testEnvironment.getDatabaseName()); collectionName = testEnvironment.newCollectionName(true); final MongoCollection<BsonDocument> collection = database .getCollection(collectionName, BsonDocument.class); testEnvironment.prepareSimpleConsistentData(collection, recordCount); } try (MongoClient client = testEnvironment.createMongoClient()) { final MongoDatabase database = client.getDatabase(testEnvironment.getDatabaseName()); final MongoCollection<BsonDocument> collection = database .getCollection(collectionName, BsonDocument.class); Assertions.assertEquals(recordCount, collection.countDocuments()); } } }
4,508
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/DocumentDbDriverTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension;
import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleTest;

import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;

/**
 * Tests for the DocumentDbDriver.
 */
@ExtendWith(DocumentDbFlapDoodleExtension.class)
public class DocumentDbDriverTest extends DocumentDbFlapDoodleTest {
    private static final String DATABASE_NAME = "database";
    private static final String COLLECTION_NAME = "testDocumentDbDriverTest";
    private static final String DRIVER_MAJOR_VERSION_KEY = "driver.major.version";
    private static final String DRIVER_MINOR_VERSION_KEY = "driver.minor.version";
    private static final String PROPERTIES_FILE_PATH = "/project.properties";

    /**
     * Initializes the test class: registers two valid users on the local MongoDB
     * instance and seeds a collection with consistent data.
     *
     * @throws SQLException if a driver manager error occurs.
     * @throws IOException if preparing the test data fails.
     */
    @BeforeAll
    public void initialize() throws SQLException, IOException {
        // Add 2 valid users to the local MongoDB instance.
        createUser(DATABASE_NAME, "user", "password");
        createUser(DATABASE_NAME, "user name", "pass word");
        prepareSimpleConsistentData(DATABASE_NAME, COLLECTION_NAME, 10,
                "user", "password");
    }

    /**
     * Test for valid supported connection strings.
     *
     * @throws SQLException if driver lookup or connection fails.
     * @throws IOException declared for parity with related tests.
     */
    @Test
    public void testValidConnectionString() throws SQLException, IOException {
        final int timeout = 15;
        //TODO : Fix the commented out tests.
        final String[] tests = new String[] {
                "jdbc:documentdb://user:password@localhost:" + getMongoPort() + "/database?tls=false",
                "jdbc:documentdb://user:password@localhost:" + getMongoPort() + "/database?tls=false",
                "jdbc:documentdb://user:password@127.0.0.1:" + getMongoPort() + "/database?tls=false",
                "jdbc:documentdb://user%20name:pass%20word@localhost:" + getMongoPort() + "/database?tls=false",
                //"jdbc:documentdb://user%20name:pass%20word@localhost:1/database?ssl=true",
                //"jdbc:documentdb://user%20name:pass%20word@localhost:1/database?tls=true",
                //"jdbc:documentdb://user%20name:pass%20word@localhost:1/database?replicaSet=rs0",
        };

        for (String test : tests) {
            Assertions.assertNotNull(DriverManager.getDriver(test));
            // FIX: close each connection; the original leaked one per iteration.
            try (Connection connection = DriverManager.getConnection(test)) {
                Assertions.assertNotNull(connection);
                Assertions.assertTrue(connection.isValid(timeout));
            }
        }
    }

    /**
     * Test invalid connection strings: both driver lookup and connection must
     * report "no suitable driver".
     */
    @Test
    public void testInvalidConnectionString() {
        final Map<String, String> tests = new HashMap<String, String>() {{
            put("jdbx:documentdb://localhost/database", "No suitable driver");
            put("documentdb://localhost/database", "No suitable driver");
            put("jdbc:documentdbx://localhost/database", "No suitable driver");
            put("jdbc:mongodb://localhost/database", "No suitable driver");
        }};

        for (Entry<String, String> test : tests.entrySet()) {
            Assertions.assertEquals(test.getValue(),
                    Assertions.assertThrows(SQLException.class,
                            () -> DriverManager.getDriver(test.getKey()))
                            .getMessage());
            Assertions.assertEquals(String.format("No suitable driver found for %s", test.getKey()),
                    Assertions.assertThrows(SQLException.class,
                            () -> DriverManager.getConnection(test.getKey()))
                            .getMessage());
        }
    }

    /**
     * Test null connection strings.
     */
    @Test
    public void testNullConnectionString() {
        Assertions.assertEquals("The url cannot be null",
                Assertions.assertThrows(SQLException.class, () -> DriverManager.getConnection(null))
                        .getMessage());
    }

    /**
     * Test empty user/password/host/database on connection strings provided by properties.
     * Each required value missing from the URL must be supplied through {@link Properties}.
     *
     * @throws SQLException if a connection cannot be established.
     */
    @Test
    public void testEmptyRequiredPropertiesOnConnectionString() throws SQLException {
        // Password supplied via properties.
        Properties properties = new Properties();
        properties.put(DocumentDbConnectionProperty.PASSWORD.getName(), "password");
        try (Connection connection = DriverManager.getConnection(
                String.format("jdbc:documentdb://user@localhost:%s/database?tls=false", getMongoPort()),
                properties)) {
            Assertions.assertNotNull(connection);
        }

        // User and password supplied via properties.
        properties = new Properties();
        properties.put(DocumentDbConnectionProperty.USER.getName(), "user");
        properties.put(DocumentDbConnectionProperty.PASSWORD.getName(), "password");
        try (Connection connection = DriverManager.getConnection(
                String.format("jdbc:documentdb://localhost:%s/database?tls=false", getMongoPort()),
                properties)) {
            Assertions.assertNotNull(connection);
        }

        // Database supplied via properties.
        // FIX: the original discarded the returned connection and re-asserted a stale
        // variable, so the next three cases were never actually verified (and leaked).
        properties = new Properties();
        properties.put(DocumentDbConnectionProperty.DATABASE.getName(), "database");
        try (Connection connection = DriverManager.getConnection(
                String.format("jdbc:documentdb://user:password@localhost:%s/?tls=false", getMongoPort()),
                properties)) {
            Assertions.assertNotNull(connection);
        }

        // Hostname supplied via properties.
        properties = new Properties();
        properties.put(DocumentDbConnectionProperty.USER.getName(), "user");
        properties.put(DocumentDbConnectionProperty.PASSWORD.getName(), "password");
        properties.put(
                DocumentDbConnectionProperty.HOSTNAME.getName(),
                String.format("localhost:%s", getMongoPort()));
        try (Connection connection = DriverManager.getConnection(
                "jdbc:documentdb:///database?tls=false", properties)) {
            Assertions.assertNotNull(connection);
        }

        // All required values supplied via properties.
        properties = new Properties();
        properties.put(DocumentDbConnectionProperty.USER.getName(), "user");
        properties.put(DocumentDbConnectionProperty.PASSWORD.getName(), "password");
        properties.put(
                DocumentDbConnectionProperty.HOSTNAME.getName(),
                String.format("localhost:%s", getMongoPort()));
        properties.put(DocumentDbConnectionProperty.DATABASE.getName(), "database");
        try (Connection connection = DriverManager.getConnection(
                "jdbc:documentdb:///?tls=false", properties)) {
            Assertions.assertNotNull(connection);
        }
    }

    /**
     * Test invalid connection strings that fail semantics check.
     *
     * @throws SQLException thrown when a driver or connection error is encountered.
     */
    @Test
    public void testInvalidMongoDbConnectionString() throws SQLException {
        final Map<String, String> tests = new HashMap<String, String>() {{
            put("jdbc:documentdb://localhost:1/database",
                    "User and password are required to connect. Syntax: 'jdbc:documentdb://[<user>[:<password>]@]<hostname>/<database>[?options...]'");
            put("jdbc:documentdb://username:password@localhost:1:2/database",
                    "Valid hostname is required to connect. Syntax: 'jdbc:documentdb://[<user>[:<password>]@]<hostname>/<database>[?options...]'");
            put("jdbc:documentdb://username:password@localhost:1/",
                    "Database is required to connect. Syntax: 'jdbc:documentdb://[<user>[:<password>]@]<hostname>/<database>[?options...]'");
            put("jdbc:documentdb://username@localhost:1/database",
                    "User and password are required to connect. Syntax: 'jdbc:documentdb://[<user>[:<password>]@]<hostname>/<database>[?options...]'");
            put("jdbc:documentdb://username:password@localhost:1?tls=true",
                    "Database is required to connect. Syntax: 'jdbc:documentdb://[<user>[:<password>]@]<hostname>/<database>[?options...]'");
        }};

        for (Entry<String, String> test : tests.entrySet()) {
            Assertions.assertNotNull(DriverManager.getDriver(test.getKey()));
            Assertions.assertEquals(test.getValue(),
                    Assertions.assertThrows(SQLException.class,
                            () -> DriverManager.getConnection(test.getKey()))
                            .getMessage());
        }
    }

    @Test
    @DisplayName("Tests that correct driver major and minor version are returned.")
    void testDriverVersion() throws IOException {
        final DocumentDbDriver driver = new DocumentDbDriver();
        // Retrieve the version metadata from properties file.
        final int majorVersion;
        final int minorVersion;
        try (InputStream is = DocumentDbDatabaseMetaData.class.getResourceAsStream(PROPERTIES_FILE_PATH)) {
            final Properties p = new Properties();
            p.load(is);
            majorVersion = Integer.parseInt(p.getProperty(DRIVER_MAJOR_VERSION_KEY));
            minorVersion = Integer.parseInt(p.getProperty(DRIVER_MINOR_VERSION_KEY));
        }
        Assertions.assertEquals(majorVersion, driver.getMajorVersion());
        Assertions.assertEquals(minorVersion, driver.getMinorVersion());
    }
}
4,509
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbJoinTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.calcite.adapter;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;

import java.util.ArrayList;
import java.util.List;

/**
 * Unit tests for {@code DocumentDbJoin.validateMinimumPrimaryKeysUsage}.
 */
class DocumentDbJoinTest {

    // Builds a mutable key list from the given names.
    private static List<String> keyList(final String... names) {
        final List<String> result = new ArrayList<>();
        for (final String name : names) {
            result.add(name);
        }
        return result;
    }

    /**
     * Using the single shared primary key satisfies the minimum-usage check,
     * even when invoked repeatedly.
     */
    @Test
    void validateCollectionKeys() {
        final List<String> leftKeys = keyList("_id");
        final List<String> rightKeys = keyList("_id", "other__id");
        final List<String> usedKeys = keyList("_id");
        final DocumentDbJoin joinUnderTest =
                Mockito.mock(DocumentDbJoin.class, Mockito.CALLS_REAL_METHODS);
        Assertions.assertDoesNotThrow(() -> {
            joinUnderTest.validateMinimumPrimaryKeysUsage(usedKeys, leftKeys, rightKeys);
            joinUnderTest.validateMinimumPrimaryKeysUsage(usedKeys, leftKeys, rightKeys);
        });
    }

    /**
     * Omitting one of the shared primary keys ("_other__id") from the used-key
     * set must be rejected.
     */
    @Test
    void validateCollectionKeysException() {
        final List<String> leftKeys = keyList("_id", "_other__id");
        final List<String> rightKeys = keyList("_id", "_other__id", "another__id");
        final List<String> usedKeys = keyList("_id");
        final DocumentDbJoin joinUnderTest =
                Mockito.mock(DocumentDbJoin.class, Mockito.CALLS_REAL_METHODS);
        Assertions.assertThrows(IllegalArgumentException.class,
                () -> joinUnderTest.validateMinimumPrimaryKeysUsage(usedKeys, leftKeys, rightKeys));
    }
}
4,510
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/sshtunnel/DocumentDbSshTunnelServerTest.java
/*
 * Copyright <2022> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.sshtunnel;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;

import java.sql.SQLException;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.ValidationType.SSH_TUNNEL;

/**
 * Integration tests for {@code DocumentDbSshTunnelServer} reference counting
 * and delayed shutdown. These tests are timing-sensitive: they rely on sleeps
 * and wall-clock windows, so statement order matters throughout.
 */
class DocumentDbSshTunnelServerTest {
    // Guards addClient()/removeClient() hand-off between the worker threads and
    // the polling loop in testAddRemoveClientMultiThreaded.
    private final Object mutex = new Object();

    /**
     * Adding a client starts the server; removing the last client with a zero
     * close delay shuts it down, and an extra remove is a no-op.
     */
    @Test
    @Tag("remote-integration")
    void testAddRemoveClient() throws Exception {
        final String connectionString = DocumentDbSshTunnelClientTest.getConnectionString();
        final DocumentDbConnectionProperties properties =
                DocumentDbConnectionProperties.getPropertiesFromConnectionString(connectionString, SSH_TUNNEL);
        final DocumentDbSshTunnelServer server = DocumentDbSshTunnelServer.builder(
                properties.getSshUser(),
                properties.getSshHostname(),
                properties.getSshPrivateKeyFile(),
                properties.getHostname())
                .sshStrictHostKeyChecking(properties.getSshStrictHostKeyChecking())
                .build();
        final int timeoutSECS = 1;
        try {
            server.addClient();
            // A positive listening port indicates the tunnel service started.
            Assertions.assertTrue(server.getServiceListeningPort() > 0);
            TimeUnit.SECONDS.sleep(timeoutSECS);
            Assertions.assertTrue(server.isAlive());
        } finally {
            server.setCloseDelayMS(0);
            Assertions.assertNotNull(server);
            server.removeClient();
            // With no delay, the port is released immediately on last remove.
            Assertions.assertEquals(0, server.getServiceListeningPort());
            TimeUnit.SECONDS.sleep(timeoutSECS);
            Assertions.assertFalse(server.isAlive());
            Assertions.assertEquals(0, server.getClientCount());
            // Extra remove is ignored.
            Assertions.assertDoesNotThrow(server::removeClient);
            Assertions.assertFalse(server.isAlive());
            Assertions.assertEquals(0, server.getClientCount());
        }
    }

    /**
     * With a non-zero close delay, the server must remain alive (and the port
     * held) for the whole delay window after the last client is removed, then
     * shut down.
     */
    @Test
    @Tag("remote-integration")
    void testAddRemoveClientDelayedClose() throws Exception {
        final String connectionString = DocumentDbSshTunnelClientTest.getConnectionString();
        final DocumentDbConnectionProperties properties =
                DocumentDbConnectionProperties.getPropertiesFromConnectionString(connectionString, SSH_TUNNEL);
        final DocumentDbSshTunnelServer server = DocumentDbSshTunnelServer.builder(
                properties.getSshUser(),
                properties.getSshHostname(),
                properties.getSshPrivateKeyFile(),
                properties.getHostname())
                .sshStrictHostKeyChecking(properties.getSshStrictHostKeyChecking())
                .build();
        final int timeoutSECS = 1;
        try {
            server.addClient();
            Assertions.assertTrue(server.getServiceListeningPort() > 0);
            TimeUnit.SECONDS.sleep(timeoutSECS);
            Assertions.assertTrue(server.isAlive());
        } finally {
            Assertions.assertNotNull(server);
            final int closeDelayMS = 5000;
            // NOTE(review): no buffer is actually added despite the name — the
            // polling loop below tolerates this because it only asserts *during*
            // the delay window.
            final int closeDelayTimeWithBuffer = closeDelayMS;
            server.setCloseDelayMS(closeDelayMS);
            server.removeClient();
            final Instant expectedCloseTime = Instant.now().plus(closeDelayTimeWithBuffer, ChronoUnit.MILLIS);
            // Throughout the delay window the server stays alive with zero clients.
            while (Instant.now().isBefore(expectedCloseTime)) {
                Assertions.assertTrue(server.getServiceListeningPort() != 0);
                Assertions.assertTrue(server.isAlive());
                Assertions.assertEquals(0, server.getClientCount());
                TimeUnit.MILLISECONDS.sleep(100);
            }
            TimeUnit.MILLISECONDS.sleep(100);
            // After the window, the port is released and the server dies.
            Assertions.assertEquals(0, server.getServiceListeningPort());
            TimeUnit.SECONDS.sleep(timeoutSECS);
            Assertions.assertFalse(server.isAlive());
            Assertions.assertEquals(0, server.getClientCount());
            // Extra remove is ignored.
            Assertions.assertDoesNotThrow(server::removeClient);
            Assertions.assertFalse(server.isAlive());
            Assertions.assertEquals(0, server.getClientCount());
        }
    }

    /**
     * Re-adding a client *before* the delayed close fires must cancel the
     * shutdown and keep the same server instance alive.
     */
    @Test
    @Tag("remote-integration")
    void testAddRemoveClientBeforeDelayedClose() throws Exception {
        final String connectionString = DocumentDbSshTunnelClientTest.getConnectionString();
        final DocumentDbConnectionProperties properties =
                DocumentDbConnectionProperties.getPropertiesFromConnectionString(connectionString, SSH_TUNNEL);
        final DocumentDbSshTunnelServer server = DocumentDbSshTunnelServer.builder(
                properties.getSshUser(),
                properties.getSshHostname(),
                properties.getSshPrivateKeyFile(),
                properties.getHostname())
                .sshStrictHostKeyChecking(properties.getSshStrictHostKeyChecking())
                .build();
        final int closeDelayMS = 2000;
        final int closeDelayTimeWithBuffer = closeDelayMS;
        final int timeoutSECS = 1;
        try {
            Assertions.assertNotNull(server);
            server.setCloseDelayMS(closeDelayMS);
            server.addClient();
            Assertions.assertEquals(1, server.getClientCount());
            Assertions.assertTrue(server.getServiceListeningPort() > 0);
            TimeUnit.MILLISECONDS.sleep(closeDelayMS * 2);
            Assertions.assertTrue(server.isAlive());
            server.removeClient();
            Assertions.assertEquals(0, server.getClientCount());
            Assertions.assertTrue(server.isAlive());
            // Re-add at half the delay — well before the pending close fires.
            TimeUnit.MILLISECONDS.sleep(closeDelayMS / 2);
            server.addClient();
            Assertions.assertEquals(1, server.getClientCount());
            Assertions.assertTrue(server.isAlive());
            // Sleeping past the original window proves the close was cancelled.
            TimeUnit.MILLISECONDS.sleep(closeDelayMS * 2);
            Assertions.assertTrue(server.isAlive());
        } finally {
            Assertions.assertNotNull(server);
            server.setCloseDelayMS(closeDelayMS);
            server.removeClient();
            final Instant expectedCloseTime = Instant.now().plus(closeDelayTimeWithBuffer, ChronoUnit.MILLIS);
            while (Instant.now().isBefore(expectedCloseTime)) {
                Assertions.assertTrue(server.getServiceListeningPort() != 0);
                Assertions.assertTrue(server.isAlive());
                Assertions.assertEquals(0, server.getClientCount());
                TimeUnit.MILLISECONDS.sleep(100);
            }
            TimeUnit.MILLISECONDS.sleep(100);
            Assertions.assertEquals(0, server.getServiceListeningPort());
            TimeUnit.SECONDS.sleep(timeoutSECS);
            Assertions.assertFalse(server.isAlive());
            Assertions.assertEquals(0, server.getClientCount());
            // Extra remove is ignored.
            Assertions.assertDoesNotThrow(server::removeClient);
            Assertions.assertFalse(server.isAlive());
            Assertions.assertEquals(0, server.getClientCount());
        }
    }

    /**
     * Adding a client *after* the delayed close has completed must restart the
     * server.
     */
    @Test
    @Tag("remote-integration")
    void testAddRemoveClientAfterDelayedClose() throws Exception {
        final String connectionString = DocumentDbSshTunnelClientTest.getConnectionString();
        final DocumentDbConnectionProperties properties =
                DocumentDbConnectionProperties.getPropertiesFromConnectionString(connectionString, SSH_TUNNEL);
        final DocumentDbSshTunnelServer server = DocumentDbSshTunnelServer.builder(
                properties.getSshUser(),
                properties.getSshHostname(),
                properties.getSshPrivateKeyFile(),
                properties.getHostname())
                .sshStrictHostKeyChecking(properties.getSshStrictHostKeyChecking())
                .build();
        final int closeDelayMS = 2000;
        final int closeDelayTimeWithBuffer = closeDelayMS;
        final int timeoutSECS = 1;
        try {
            Assertions.assertNotNull(server);
            server.setCloseDelayMS(closeDelayMS);
            server.addClient();
            Assertions.assertEquals(1, server.getClientCount());
            Assertions.assertTrue(server.getServiceListeningPort() > 0);
            TimeUnit.MILLISECONDS.sleep(closeDelayMS * 2);
            Assertions.assertTrue(server.isAlive());
            server.removeClient();
            Assertions.assertEquals(0, server.getClientCount());
            Assertions.assertTrue(server.isAlive());
            // Wait out the whole delay window so the close actually fires.
            TimeUnit.MILLISECONDS.sleep(closeDelayMS * 2);
            Assertions.assertFalse(server.isAlive());
            // A new client restarts the (previously closed) server.
            server.addClient();
            Assertions.assertEquals(1, server.getClientCount());
            Assertions.assertTrue(server.isAlive());
            TimeUnit.MILLISECONDS.sleep(closeDelayMS * 2);
            Assertions.assertTrue(server.isAlive());
        } finally {
            Assertions.assertNotNull(server);
            server.setCloseDelayMS(closeDelayMS);
            server.removeClient();
            final Instant expectedCloseTime = Instant.now().plus(closeDelayTimeWithBuffer, ChronoUnit.MILLIS);
            while (Instant.now().isBefore(expectedCloseTime)) {
                Assertions.assertTrue(server.getServiceListeningPort() != 0);
                Assertions.assertTrue(server.isAlive());
                Assertions.assertEquals(0, server.getClientCount());
                TimeUnit.MILLISECONDS.sleep(100);
            }
            TimeUnit.MILLISECONDS.sleep(100);
            Assertions.assertEquals(0, server.getServiceListeningPort());
            TimeUnit.SECONDS.sleep(timeoutSECS);
            Assertions.assertFalse(server.isAlive());
            Assertions.assertEquals(0, server.getClientCount());
            // Extra remove is ignored.
            Assertions.assertDoesNotThrow(server::removeClient);
            Assertions.assertFalse(server.isAlive());
            Assertions.assertEquals(0, server.getClientCount());
        }
    }

    /**
     * Stress test: N threads each add a client, hold it for a staggered number
     * of seconds, then remove it. The polling loop checks that the server's
     * client count always matches the number of live threads and that the
     * server dies only after the last client leaves.
     */
    @Test
    @Tag("remote-integration")
    void testAddRemoveClientMultiThreaded() throws SQLException, InterruptedException {
        final int numOfThreads = 10;
        final List<Thread> threads = new ArrayList<>();
        final List<Runner> runners = new ArrayList<>();
        final String connectionString = DocumentDbSshTunnelClientTest.getConnectionString();
        final DocumentDbConnectionProperties properties =
                DocumentDbConnectionProperties.getPropertiesFromConnectionString(connectionString, SSH_TUNNEL);
        final DocumentDbSshTunnelServer server = DocumentDbSshTunnelServer.builder(
                properties.getSshUser(),
                properties.getSshHostname(),
                properties.getSshPrivateKeyFile(),
                properties.getHostname())
                .sshStrictHostKeyChecking(properties.getSshStrictHostKeyChecking())
                .build();
        Assertions.assertNotNull(server);
        server.setCloseDelayMS(0);

        // Create all the runners and assign them to a thread.
        for (int i = 0; i < numOfThreads; i++) {
            // Staggered lifetimes (N, N-1, ..., 1 seconds) so clients drop out one by one.
            final int runtimeSecs = numOfThreads - i;
            final Runner runner = new Runner(runtimeSecs, server);
            final Thread threadRunner = new Thread(runner);
            runners.add(runner);
            threads.add(threadRunner);
        }
        // Start all the threads.
        for (int i = 0; i < numOfThreads; i++) {
            threads.get(i).start();
        }
        // Wait for the threads to complete.
        TimeUnit.SECONDS.sleep(1);
        while (threads.size() > 0) {
            TimeUnit.MILLISECONDS.sleep(100);
            synchronized (mutex) {
                // Allow thread to exit after releasing the MUTEX.
                TimeUnit.MILLISECONDS.sleep(10);
                final long clientCount = server.getClientCount();
                int threadCount = 0;
                // Reverse iteration so finished threads can be removed in place.
                for (int i = threads.size() - 1; i >= 0; i--) {
                    if (threads.get(i).isAlive()) {
                        threadCount++;
                        Assertions.assertTrue(server.isAlive());
                    } else {
                        threads.get(i).join();
                        threads.remove(i);
                    }
                }
                // Invariant: one registered client per live thread, and the
                // server is alive iff at least one client remains.
                Assertions.assertEquals(clientCount, threadCount);
                Assertions.assertTrue((clientCount > 0 && server.isAlive()) || !server.isAlive());
            }
        }
        // Ensure no more clients and no longer alive.
        Assertions.assertEquals(0, server.getClientCount());
        Assertions.assertFalse(server.isAlive());
        // Ensure clients didn't throw any exceptions.
        for (final Runner runner : runners) {
            Assertions.assertEquals(0, runner.getExceptions().size(),
                    () -> runner.getExceptions().stream()
                            .map(Throwable::getMessage)
                            .collect(Collectors.joining("; ")));
        }
    }

    /**
     * Worker used by the multithreaded test: registers a client, sleeps for its
     * assigned lifetime, then unregisters. Exceptions are collected rather than
     * thrown so the main test thread can report them all.
     * (Non-static by design: it shares the enclosing test's {@code mutex}.)
     */
    private class Runner implements Runnable {
        private final int runtimeSecs;
        private final DocumentDbSshTunnelServer server;
        // Thread-safe sink for errors raised on the worker thread.
        private final Queue<Exception> exceptions = new ConcurrentLinkedDeque<>();

        public Runner(final int runtimeSecs, final DocumentDbSshTunnelServer server) {
            this.runtimeSecs = runtimeSecs;
            this.server = server;
        }

        public Queue<Exception> getExceptions() {
            return exceptions;
        }

        @Override
        public void run() {
            try {
                synchronized (mutex) {
                    server.addClient();
                }
                TimeUnit.SECONDS.sleep(runtimeSecs);
            } catch (Exception e) {
                exceptions.add(e);
            } finally {
                try {
                    synchronized (mutex) {
                        server.removeClient();
                    }
                } catch (Exception e) {
                    exceptions.add(e);
                }
            }
        }
    }
}
4,511
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/sshtunnel/DocumentDbSshTunnelClientTest.java
/*
 * Copyright <2022> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.sshtunnel;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.ValidationType.SSH_TUNNEL;

/**
 * Integration tests for {@code DocumentDbSshTunnelClient}: client/server
 * lifecycle, server sharing across clients, and validation error messages.
 * Connection details come from the DOC_DB_* environment variables.
 */
class DocumentDbSshTunnelClientTest {
    private static final String DOC_DB_PRIV_KEY_FILE_PROPERTY = "DOC_DB_PRIV_KEY_FILE";
    private static final String DOC_DB_USER_PROPERTY = "DOC_DB_USER";
    private static final String DOC_DB_HOST_PROPERTY = "DOC_DB_HOST";

    /**
     * Creating a client starts a tunnel server; closing the only client shuts
     * the server down after its close delay.
     */
    @Test
    @Tag("remote-integration")
    void testConstructorDestructor() throws Exception {
        final DocumentDbConnectionProperties properties = getConnectionProperties();
        DocumentDbSshTunnelClient client = null;
        DocumentDbSshTunnelServer server = null;
        try {
            client = new DocumentDbSshTunnelClient(properties);
            server = client.getSshTunnelServer();
            server.setCloseDelayMS(1000);
            Assertions.assertTrue(client.getServiceListeningPort() > 0);
            TimeUnit.SECONDS.sleep(1);
            Assertions.assertTrue(client.isServerAlive());
            TimeUnit.SECONDS.sleep(1);
            Assertions.assertTrue(client.isServerAlive());
        } finally {
            if (client != null) {
                // FIX: assert the server reference *before* dereferencing it; the
                // original called server.getCloseDelayMS() first, so a null server
                // raised an NPE that masked the real failure.
                Assertions.assertNotNull(server);
                client.close();
                // This is the only client, so server will shut down.
                TimeUnit.MILLISECONDS.sleep(server.getCloseDelayMS() + 500);
                Assertions.assertFalse(client.isServerAlive());
            }
        }
    }

    /**
     * An empty SSH user must be rejected at construction time.
     */
    @Test
    @Tag("remote-integration")
    void testInvalidConnectionProperties() throws Exception {
        final DocumentDbConnectionProperties properties = getConnectionProperties();
        properties.setSshUser("");
        Assertions.assertThrows(IllegalArgumentException.class,
                () -> new DocumentDbSshTunnelClient(properties));
    }

    /**
     * Clients built from identical connection properties must share a single
     * server instance, which stays alive until the last client closes.
     */
    @Test
    @Tag("remote-integration")
    void testMultipleClientsSameServer() throws Exception {
        final DocumentDbConnectionProperties properties = getConnectionProperties();
        final List<DocumentDbSshTunnelClient> clients = new ArrayList<>();
        try {
            for (int i = 0; i < 50; i++) {
                final DocumentDbSshTunnelClient client = new DocumentDbSshTunnelClient(properties);
                Assertions.assertNotNull(client);
                for (DocumentDbSshTunnelClient compareClient : clients) {
                    // Each client is different
                    Assertions.assertNotEquals(client, compareClient);
                    // Each server with the same connection properties has the same server
                    Assertions.assertEquals(client.getSshTunnelServer(), compareClient.getSshTunnelServer());
                }
                clients.add(client);
            }
        } finally {
            // FIX: guard against an empty list so a constructor failure above is
            // not masked by an IndexOutOfBoundsException from clients.get(0).
            if (!clients.isEmpty()) {
                int clientCount = clients.size();
                final DocumentDbSshTunnelServer server = clients.get(0).getSshTunnelServer();
                server.setCloseDelayMS(0);
                for (DocumentDbSshTunnelClient client : clients) {
                    client.close();
                    clientCount--;
                    // Server survives until the final client closes.
                    if (clientCount > 0) {
                        Assertions.assertTrue(client.getSshTunnelServer().isAlive());
                    } else {
                        Assertions.assertFalse(client.getSshTunnelServer().isAlive());
                    }
                }
            }
        }
    }

    /**
     * An unreachable SSH host must surface a connect-timeout SQLException.
     */
    @Test
    @Tag("remote-integration")
    void testInvalidSshHostnameConnectionTimeout() throws Exception {
        final DocumentDbConnectionProperties properties = getConnectionProperties();
        // 2.2.2.2 is expected to be unroutable from the test environment.
        properties.setSshHostname("2.2.2.2");
        final Exception e = Assertions.assertThrows(
                SQLException.class, () -> new DocumentDbSshTunnelClient(properties));
        // Message wording differs by platform ("Connection timed out" vs "Operation timed out").
        Assertions.assertTrue(e.toString().startsWith(
                "java.sql.SQLException: java.net.ConnectException: Connection timed out")
                || e.toString().startsWith(
                "java.sql.SQLException: java.net.ConnectException: Operation timed out"));
    }

    /**
     * An unknown SSH user must fail public-key authentication; the auth-method
     * list in the message differs between macOS and Linux.
     */
    @Test
    @Tag("remote-integration")
    void testInvalidSshUserAuthFail() throws Exception {
        final DocumentDbConnectionProperties properties = getConnectionProperties();
        properties.setSshUser("unknown");
        final Exception e = Assertions.assertThrows(
                SQLException.class, () -> new DocumentDbSshTunnelClient(properties));
        final String os = System.getProperty("os.name");
        if (os.toLowerCase().startsWith("mac")) {
            Assertions.assertEquals("java.sql.SQLException: Auth fail for methods 'publickey'",
                    e.toString());
        } else {
            Assertions.assertEquals(
                    "java.sql.SQLException: Auth fail for methods 'publickey,gssapi-keyex,gssapi-with-mic'",
                    e.toString());
        }
    }

    /**
     * A missing private-key file must be reported by name.
     */
    @Test
    @Tag("remote-integration")
    void testInvalidSshPrivateKeyFileNotFound() throws Exception {
        final DocumentDbConnectionProperties properties = getConnectionProperties();
        properties.setSshPrivateKeyFile("unknown");
        final Exception e = Assertions.assertThrows(
                SQLException.class, () -> new DocumentDbSshTunnelClient(properties));
        Assertions.assertEquals(
                "java.sql.SQLException: SSH private key file 'unknown' not found.", e.toString());
    }

    /**
     * With strict host key checking on, a missing known-hosts file must be
     * reported by name.
     */
    @Test
    @Tag("remote-integration")
    void testInvalidSshKnownHostsFileNotFound() throws Exception {
        final DocumentDbConnectionProperties properties = getConnectionProperties();
        properties.setSshStrictHostKeyChecking("true");
        properties.setSshKnownHostsFile("unknown");
        final Exception e = Assertions.assertThrows(
                SQLException.class, () -> new DocumentDbSshTunnelClient(properties));
        Assertions.assertEquals(
                "java.sql.SQLException: 'Known hosts' file 'unknown' not found.", e.toString());
    }

    // Parses the environment-derived connection string into validated properties.
    private static DocumentDbConnectionProperties getConnectionProperties() throws SQLException {
        final String connectionString = getConnectionString();
        return DocumentDbConnectionProperties.getPropertiesFromConnectionString(connectionString, SSH_TUNNEL);
    }

    /**
     * Builds an SSH-tunnel connection string from the DOC_DB_HOST,
     * DOC_DB_USER (formatted as {@code user@host}), and DOC_DB_PRIV_KEY_FILE
     * environment variables. Shared with the server test.
     */
    static String getConnectionString() {
        final String docDbRemoteHost = System.getenv(DOC_DB_HOST_PROPERTY);
        final String docDbSshUserAndHost = System.getenv(DOC_DB_USER_PROPERTY);
        // DOC_DB_USER is "user@host"; split on the first '@'.
        final int userSeparatorIndex = docDbSshUserAndHost.indexOf('@');
        final String sshUser = docDbSshUserAndHost.substring(0, userSeparatorIndex);
        final String sshHostname = docDbSshUserAndHost.substring(userSeparatorIndex + 1);
        final String docDbSshPrivKeyFile = System.getenv(DOC_DB_PRIV_KEY_FILE_PROPERTY);
        final DocumentDbConnectionProperties properties = new DocumentDbConnectionProperties();
        properties.setHostname(docDbRemoteHost);
        properties.setSshUser(sshUser);
        properties.setSshHostname(sshHostname);
        properties.setSshPrivateKeyFile(docDbSshPrivKeyFile);
        properties.setSshStrictHostKeyChecking(String.valueOf(false));
        return DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME + properties.buildSshConnectionString();
    }
}
4,512
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/DatabaseMetaDataTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import software.amazon.documentdb.jdbc.common.helpers.HelperFunctions;
import software.amazon.documentdb.jdbc.common.mock.MockConnection;
import software.amazon.documentdb.jdbc.common.mock.MockDatabaseMetadata;
import software.amazon.documentdb.jdbc.common.mock.MockStatement;
import java.sql.RowIdLifetime;
import java.util.Properties;

/**
 * Test for abstract DatabaseMetaData Object.
 *
 * <p>Verifies the fixed capability/limit values reported by the common
 * {@code DatabaseMetaData} base class through a mock subclass. Each assertion
 * either pins an expected return value ({@code expectFunctionDoesntThrow}) or
 * asserts the method is unsupported ({@code expectFunctionThrows}).
 */
public class DatabaseMetaDataTest {
    // Fresh mocks are created per test in initialize(), so these cannot be final.
    private java.sql.DatabaseMetaData databaseMetaData;
    private java.sql.Connection connection;

    @BeforeEach
    void initialize() {
        connection = new MockConnection(new Properties());
        databaseMetaData = new MockDatabaseMetadata(connection);
    }

    /**
     * Verifies all supportsXxx() capability flags.
     * NOTE(review): supportsANSI92EntryLevelSQL() was previously asserted in
     * testStores(); it is moved here to sit with the other ANSI92 checks.
     */
    @Test
    void testSupport() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsANSI92EntryLevelSQL(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsANSI92FullSQL(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsANSI92IntermediateSQL(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsAlterTableWithAddColumn(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsAlterTableWithDropColumn(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsBatchUpdates(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsCatalogsInDataManipulation(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsCatalogsInIndexDefinitions(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsCatalogsInPrivilegeDefinitions(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsCatalogsInProcedureCalls(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsCatalogsInTableDefinitions(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsColumnAliasing(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsConvert(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsConvert(0, 0), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsCoreSQLGrammar(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsCorrelatedSubqueries(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsDataDefinitionAndDataManipulationTransactions(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsDataManipulationTransactionsOnly(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsDifferentTableCorrelationNames(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsExpressionsInOrderBy(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsExtendedSQLGrammar(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsFullOuterJoins(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsGetGeneratedKeys(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsGroupBy(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsGroupByBeyondSelect(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsGroupByUnrelated(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsIntegrityEnhancementFacility(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsLikeEscapeClause(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsLimitedOuterJoins(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsMinimumSQLGrammar(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsMixedCaseIdentifiers(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsMixedCaseQuotedIdentifiers(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsMultipleOpenResults(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsMultipleResultSets(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsMultipleTransactions(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsNamedParameters(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsNonNullableColumns(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsOpenCursorsAcrossCommit(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsOpenCursorsAcrossRollback(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsOpenStatementsAcrossCommit(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsOpenStatementsAcrossRollback(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsOrderByUnrelated(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsOuterJoins(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsPositionedDelete(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsPositionedUpdate(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsResultSetHoldability(0), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSavepoints(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSchemasInDataManipulation(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSchemasInIndexDefinitions(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSchemasInPrivilegeDefinitions(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSchemasInProcedureCalls(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSchemasInTableDefinitions(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSelectForUpdate(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsStatementPooling(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsStoredFunctionsUsingCallSyntax(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsStoredProcedures(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSubqueriesInComparisons(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSubqueriesInExists(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSubqueriesInIns(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsSubqueriesInQuantifieds(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsTableCorrelationNames(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsTransactionIsolationLevel(0), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsTransactions(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsUnion(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsUnionAll(), false);
        // Only forward-only, read-only result sets are supported.
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsResultSetConcurrency(
                java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_READ_ONLY), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsResultSetConcurrency(
                java.sql.ResultSet.TYPE_FORWARD_ONLY, java.sql.ResultSet.CONCUR_UPDATABLE), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsResultSetConcurrency(
                java.sql.ResultSet.TYPE_SCROLL_SENSITIVE, java.sql.ResultSet.CONCUR_READ_ONLY), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsResultSetType(
                java.sql.ResultSet.TYPE_FORWARD_ONLY), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.supportsResultSetType(
                java.sql.ResultSet.TYPE_SCROLL_SENSITIVE), false);
    }

    /**
     * Verifies driver-reported limit values (0 means "no limit or unknown" per
     * the JDBC contract) and that unsupported metadata queries throw.
     */
    @Test
    void testMaxValues() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxCharLiteralLength(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxColumnNameLength(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxColumnsInGroupBy(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxColumnsInIndex(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxColumnsInOrderBy(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxColumnsInSelect(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxColumnsInTable(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxConnections(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxCursorNameLength(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxIndexLength(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxProcedureNameLength(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxSchemaNameLength(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxStatements(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxBinaryLiteralLength(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxTablesInSelect(), 1);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxUserNameLength(), 0);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxCatalogNameLength(), 60);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxTableNameLength(), 60);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getMaxStatementLength(), 65536);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getDefaultTransactionIsolation(),
                java.sql.Connection.TRANSACTION_NONE);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getResultSetHoldability(),
                java.sql.ResultSet.CLOSE_CURSORS_AT_COMMIT);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getSQLStateType(),
                java.sql.DatabaseMetaData.sqlStateSQL);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getProcedureTerm(), "");
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getSchemaTerm(), "");
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getIdentifierQuoteString(), "\"");
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getRowIdLifetime(),
                RowIdLifetime.ROWID_UNSUPPORTED);
        // The following metadata queries are unsupported and must throw.
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getCrossReference("", "", "", "", "", ""));
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getExportedKeys("", "", ""));
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getFunctionColumns("", "", "", ""));
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getFunctions("", "", ""));
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getProcedureColumns("", "", "", ""));
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getPseudoColumns("", "", "", ""));
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getTablePrivileges("", "", ""));
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getUDTs("", "", "", new int[]{}));
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getVersionColumns("", "", ""));
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getSuperTables("", "", ""));
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.getSuperTypes("", "", ""));
    }

    /** Metadata must return the connection it was created with. */
    @Test
    void testConnection() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getConnection(), connection);
    }

    /** Wrapper/unwrap contract: only the concrete metadata type is unwrappable. */
    @Test
    void testWrap() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.isWrapperFor(MockDatabaseMetadata.class), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.isWrapperFor(MockStatement.class), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.isWrapperFor(null), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.unwrap(MockDatabaseMetadata.class), databaseMetaData);
        HelperFunctions.expectFunctionThrows(() -> databaseMetaData.unwrap(MockStatement.class));
    }

    /** Driver version values must match the constants declared on Driver. */
    @Test
    void testDriverVersion() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getDriverMajorVersion(),
                Driver.DRIVER_MAJOR_VERSION);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getDriverMinorVersion(),
                Driver.DRIVER_MINOR_VERSION);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.getDriverVersion(),
                Driver.DRIVER_VERSION);
    }

    @Test
    void testUpdates() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.updatesAreDetected(1), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.usesLocalFilePerTable(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.usesLocalFiles(), false);
    }

    @Test
    void testAll() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.allProceduresAreCallable(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.allTablesAreSelectable(), true);
    }

    @Test
    void testDataDefinition() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.dataDefinitionCausesTransactionCommit(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.dataDefinitionIgnoredInTransactions(), false);
    }

    @Test
    void testMisc() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.autoCommitFailureClosesAllResultSets(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.deletesAreDetected(1), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.insertsAreDetected(1), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.locatorsUpdateCopy(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.generatedKeyAlwaysReturned(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.doesMaxRowSizeIncludeBlobs(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.isCatalogAtStart(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.isReadOnly(), true);
    }

    @Test
    void testNull() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.nullPlusNonNullIsNull(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.nullsAreSortedAtEnd(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.nullsAreSortedAtStart(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.nullsAreSortedHigh(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.nullsAreSortedLow(), false);
    }

    @Test
    void testOthers() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.othersDeletesAreVisible(1), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.othersInsertsAreVisible(1), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.othersUpdatesAreVisible(1), false);
    }

    @Test
    void testOwn() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.ownDeletesAreVisible(1), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.ownInsertsAreVisible(1), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.ownUpdatesAreVisible(1), false);
    }

    /** storesXxx identifier-case behavior: mixed-case preserved, no folding. */
    @Test
    void testStores() {
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.storesLowerCaseIdentifiers(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.storesLowerCaseQuotedIdentifiers(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.storesUpperCaseIdentifiers(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.storesUpperCaseQuotedIdentifiers(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.storesMixedCaseIdentifiers(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> databaseMetaData.storesMixedCaseQuotedIdentifiers(), true);
    }
}
4,513
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/DataSourceTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.common; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import software.amazon.documentdb.jdbc.common.helpers.HelperFunctions; import software.amazon.documentdb.jdbc.common.mock.MockDataSource; import software.amazon.documentdb.jdbc.common.mock.MockStatement; /** * Test for abstract DataSource Object. */ public class DataSourceTest { private javax.sql.DataSource dataSource; @BeforeEach void initialize() { dataSource = new MockDataSource(); } @Test void testUnwrap() { HelperFunctions.expectFunctionDoesntThrow(() -> dataSource.isWrapperFor(MockDataSource.class), true); HelperFunctions.expectFunctionDoesntThrow(() -> dataSource.isWrapperFor(MockStatement.class), false); HelperFunctions.expectFunctionDoesntThrow(() -> dataSource.isWrapperFor(null), false); HelperFunctions.expectFunctionDoesntThrow(() -> dataSource.unwrap(MockDataSource.class), dataSource); HelperFunctions.expectFunctionThrows(() -> dataSource.unwrap(MockStatement.class)); } @Test void testLoggers() { HelperFunctions.expectFunctionDoesntThrow(() -> dataSource.getLogWriter(), null); HelperFunctions.expectFunctionDoesntThrow(() -> dataSource.setLogWriter(null)); HelperFunctions.expectFunctionDoesntThrow(() -> dataSource.getLogWriter(), null); HelperFunctions.expectFunctionDoesntThrow(() -> dataSource.getParentLogger()); } }
4,514
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/PreparedStatementTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import software.amazon.documentdb.jdbc.common.helpers.HelperFunctions;
import software.amazon.documentdb.jdbc.common.mock.MockConnection;
import software.amazon.documentdb.jdbc.common.mock.MockPreparedStatement;
import java.io.InputStream;
import java.io.Reader;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.NClob;
import java.util.Properties;

/**
 * Test for abstract PreparedStatement Object.
 *
 * <p>Verifies that parameter-setter and batch operations on the common
 * {@code PreparedStatement} base class are unsupported (throw), while
 * {@code execute()} succeeds on the mock.
 */
public class PreparedStatementTest {
    // Re-created for each test in initialize(), so not final.
    private java.sql.Connection connection;
    private java.sql.PreparedStatement preparedStatement;

    @BeforeEach
    void initialize() {
        connection = new MockConnection(new Properties());
        preparedStatement = new MockPreparedStatement(connection, "");
    }

    /** Parameterless execute succeeds; SQL-string variants are unsupported. */
    @Test
    void testExecute() {
        HelperFunctions.expectFunctionDoesntThrow(() -> preparedStatement.execute(), true);
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.execute(""));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.executeQuery(""));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.executeUpdate());
    }

    @Test
    void testMisc() {
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.addBatch());
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.clearParameters());
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.getParameterMetaData());
    }

    /**
     * Every setXxx parameter overload must throw (driver is read-only).
     * Deprecation suppressed for setUnicodeStream.
     * NOTE(review): a duplicated setAsciiStream(0, null, 0) assertion was
     * removed; the int, long, and no-length overloads are each covered once.
     */
    @Test
    @SuppressWarnings("deprecation")
    void testSet() {
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setArray(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setAsciiStream(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setAsciiStream(0, null, (long)0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setAsciiStream(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setBigDecimal(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setBinaryStream(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setBinaryStream(0, null, (long)0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setBinaryStream(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setBlob(0, (Blob)null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setBlob(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setBlob(0, (InputStream)null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setBoolean(0, false));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setByte(0, (byte)0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setBytes(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setCharacterStream(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setCharacterStream(0, null, (long)0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setCharacterStream(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setClob(0, (Clob)null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setClob(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setClob(0, (Reader)null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setDate(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setDate(0, null, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setDouble(0, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setFloat(0, (float)0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setInt(0, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setLong(0, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setNCharacterStream(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setNCharacterStream(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setNClob(0, (NClob)null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setNClob(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setNClob(0, (Reader)null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setNString(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setNull(0, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setNull(0, 0, ""));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setObject(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setObject(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setObject(0, null, 0, 0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setRef(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setRowId(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setSQLXML(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setShort(0, (short)0));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setString(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setTime(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setTime(0, null, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setTimestamp(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setTimestamp(0, null, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setURL(0, null));
        HelperFunctions.expectFunctionThrows(() -> preparedStatement.setUnicodeStream(0, null, 0));
    }
}
4,515
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/PooledConnectionTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import software.amazon.documentdb.jdbc.common.helpers.HelperFunctions;
import software.amazon.documentdb.jdbc.common.mock.MockConnection;
import software.amazon.documentdb.jdbc.common.mock.MockPooledConnection;
import javax.sql.ConnectionEvent;
import javax.sql.ConnectionEventListener;
import java.util.Properties;

/**
 * Test for abstract PooledConnection Object.
 *
 * <p>Verifies that registered {@link ConnectionEventListener}s receive close
 * notifications, that removed listeners no longer do, and that statement-event
 * listener registration accepts null without throwing.
 */
public class PooledConnectionTest {
    private javax.sql.PooledConnection pooledConnection;
    // Flags flipped by the listener callbacks; reset before each test.
    private boolean isClosed;
    private boolean isError;
    // Never reassigned — declared final. Records which callback fired.
    private final ConnectionEventListener listener = new ConnectionEventListener() {
        @Override
        public void connectionClosed(final ConnectionEvent event) {
            isClosed = true;
        }

        @Override
        public void connectionErrorOccurred(final ConnectionEvent event) {
            isError = true;
        }
    };

    @BeforeEach
    void initialize() {
        pooledConnection = new MockPooledConnection(new MockConnection(new Properties()));
        isClosed = false;
        isError = false;
    }

    /**
     * close() notifies a registered listener exactly via connectionClosed;
     * after removal the listener must not be notified again.
     */
    @Test
    void testListeners() {
        pooledConnection.addConnectionEventListener(listener);
        Assertions.assertFalse(isClosed);
        Assertions.assertFalse(isError);
        HelperFunctions.expectFunctionDoesntThrow(() -> pooledConnection.close());
        Assertions.assertTrue(isClosed);
        Assertions.assertFalse(isError);
        pooledConnection.removeConnectionEventListener(listener);
        isClosed = false;
        HelperFunctions.expectFunctionDoesntThrow(() -> pooledConnection.close());
        Assertions.assertFalse(isClosed);
        Assertions.assertFalse(isError);
        // Statement-event listener registration must tolerate null arguments.
        pooledConnection.addStatementEventListener(null);
        pooledConnection.removeStatementEventListener(null);
    }
}
4,516
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/ResultSetMetaDataTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.common; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import software.amazon.documentdb.jdbc.common.helpers.HelperFunctions; import software.amazon.documentdb.jdbc.common.mock.MockResultSetMetaData; import software.amazon.documentdb.jdbc.common.mock.MockStatement; /** * Test for abstract ResultSetMetaData Object. */ public class ResultSetMetaDataTest { private java.sql.ResultSetMetaData resultSetMetaData; @BeforeEach void initialize() { resultSetMetaData = new MockResultSetMetaData(); } @Test void testWrap() { HelperFunctions.expectFunctionDoesntThrow(() -> resultSetMetaData.isWrapperFor(MockResultSetMetaData.class), true); HelperFunctions.expectFunctionDoesntThrow(() -> resultSetMetaData.isWrapperFor(MockStatement.class), false); HelperFunctions.expectFunctionDoesntThrow(() -> resultSetMetaData.isWrapperFor(null), false); HelperFunctions.expectFunctionDoesntThrow(() -> resultSetMetaData.unwrap(MockResultSetMetaData.class), resultSetMetaData); HelperFunctions.expectFunctionThrows(() -> resultSetMetaData.unwrap(MockStatement.class)); } }
4,517
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/StatementTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import software.amazon.documentdb.jdbc.common.helpers.HelperFunctions;
import software.amazon.documentdb.jdbc.common.mock.MockConnection;
import software.amazon.documentdb.jdbc.common.mock.MockResultSet;
import software.amazon.documentdb.jdbc.common.mock.MockStatement;
import java.sql.ResultSet;
import java.sql.SQLWarning;
import java.util.Properties;

/**
 * Test for abstract Statement Object.
 *
 * <p>Exercises getters/setters, execute variants, close semantics, the
 * wrapper contract, and warning chaining of the common {@code Statement}
 * base class through {@link MockStatement}.
 */
public class StatementTest {
    // Re-created per test in initialize(), so not final.
    private java.sql.Statement statement;
    private java.sql.Connection connection;

    @BeforeEach
    void initialize() {
        connection = new MockConnection(new Properties());
        statement = new MockStatement(connection);
    }

    /**
     * Setter validation (negative values and unsupported modes throw) and the
     * corresponding getter round-trips. Order matters: getters are checked
     * against the values the preceding setters established.
     */
    @Test
    void testSetGetIs() {
        HelperFunctions.expectFunctionThrows(() -> statement.setPoolable(false));
        HelperFunctions.expectFunctionThrows(() -> statement.setCursorName(""));
        // Only FETCH_FORWARD is a supported fetch direction.
        HelperFunctions.expectFunctionThrows(() -> statement.setFetchDirection(ResultSet.FETCH_REVERSE));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.setFetchDirection(ResultSet.FETCH_FORWARD));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.setEscapeProcessing(false));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.setFetchSize(0));
        HelperFunctions.expectFunctionThrows(() -> statement.setFetchSize(-1));
        HelperFunctions.expectFunctionThrows(() -> statement.setLargeMaxRows(-1));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.setLargeMaxRows(1));
        HelperFunctions.expectFunctionThrows(() -> statement.setMaxFieldSize(-1));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.setMaxFieldSize(1));
        HelperFunctions.expectFunctionThrows(() -> statement.setMaxRows(-1));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.setMaxRows(1));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getConnection(), connection);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getFetchDirection(), ResultSet.FETCH_FORWARD);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getFetchSize(), 0);
        HelperFunctions.expectFunctionThrows(() -> statement.getGeneratedKeys());
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getLargeMaxRows(), (long)1);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getMaxFieldSize(), 1);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getLargeUpdateCount(), (long)-1);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getMaxRows(), 1);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getMoreResults(), false);
        // Install a result set so execute("") has something to return/close.
        ((MockStatement)statement).setResultSet(new MockResultSet(statement));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.execute(""));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getMoreResults(
                java.sql.Statement.CLOSE_CURRENT_RESULT), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getResultSet(), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getResultSetConcurrency(),
                ResultSet.CONCUR_READ_ONLY);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getResultSetHoldability(),
                ResultSet.CLOSE_CURSORS_AT_COMMIT);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getResultSetType(),
                ResultSet.TYPE_FORWARD_ONLY);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getUpdateCount(), -1);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.closeOnCompletion());
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.isCloseOnCompletion(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.isPoolable(), false);
        // Long.MAX_VALUE large-max-rows must clamp getMaxRows to Integer.MAX_VALUE.
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.setLargeMaxRows(Long.MAX_VALUE));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getMaxRows(), Integer.MAX_VALUE);
    }

    /**
     * execute variants succeed; batch and update variants are unsupported.
     * NOTE(review): a duplicated trailing executeBatch() assertion was removed;
     * it is already covered earlier in this method.
     */
    @Test
    void testExecute() {
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.execute(""), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.execute("", 0), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.execute("", new int[]{}), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.execute("", new String[]{}), true);
        HelperFunctions.expectFunctionThrows(() -> statement.executeBatch());
        HelperFunctions.expectFunctionThrows(() -> statement.executeLargeBatch());
        HelperFunctions.expectFunctionThrows(() -> statement.executeLargeUpdate(""));
        HelperFunctions.expectFunctionThrows(() -> statement.executeLargeUpdate("", 0));
        HelperFunctions.expectFunctionThrows(() -> statement.executeLargeUpdate("", new int[]{}));
        HelperFunctions.expectFunctionThrows(() -> statement.executeLargeUpdate("", new String[]{}));
        HelperFunctions.expectFunctionThrows(() -> statement.executeUpdate(""));
        HelperFunctions.expectFunctionThrows(() -> statement.executeUpdate("", 0));
        HelperFunctions.expectFunctionThrows(() -> statement.executeUpdate("", new int[]{}));
        HelperFunctions.expectFunctionThrows(() -> statement.executeUpdate("", new String[]{}));
    }

    @Test
    void testMisc() {
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.cancel());
        HelperFunctions.expectFunctionThrows(() -> statement.addBatch(""));
        HelperFunctions.expectFunctionThrows(() -> statement.clearBatch());
    }

    /** close() is idempotent; verifyOpen() throws once the statement is closed. */
    @Test
    void testClosed() {
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.isClosed(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.close());
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.isClosed(), true);
        HelperFunctions.expectFunctionThrows(() -> ((Statement)statement).verifyOpen());
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.close());
    }

    /** Closing a statement with an open result set must not throw. */
    @Test
    void testResultSetClose() {
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.isClosed(), false);
        ((MockStatement)statement).setResultSet(new MockResultSet(statement));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.execute(""));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.close());
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.isClosed(), true);
        HelperFunctions.expectFunctionThrows(() -> ((Statement)statement).verifyOpen());
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.close());
    }

    /** Wrapper contract: only the concrete statement type is unwrappable. */
    @Test
    void testWrap() {
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.isWrapperFor(MockStatement.class), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.isWrapperFor(MockConnection.class), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.isWrapperFor(null), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.unwrap(MockStatement.class), statement);
        HelperFunctions.expectFunctionThrows(() -> statement.unwrap(MockConnection.class));
    }

    /** Warnings chain in order via setNextWarning and reset on clearWarnings. */
    @Test
    void testWarnings() {
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getWarnings(), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.clearWarnings());
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getWarnings(), null);
        HelperFunctions.expectFunctionDoesntThrow(() ->
                ((Statement)statement).addWarning(HelperFunctions.getNewWarning1()));
        final SQLWarning warning = HelperFunctions.getNewWarning1();
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getWarnings(), warning);
        // Appending a second warning must chain it after the first.
        warning.setNextWarning(HelperFunctions.getNewWarning2());
        HelperFunctions.expectFunctionDoesntThrow(() ->
                ((Statement)statement).addWarning(HelperFunctions.getNewWarning2()));
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getWarnings(), warning);
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.clearWarnings());
        HelperFunctions.expectFunctionDoesntThrow(() -> statement.getWarnings(), null);
    }
}
4,518
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/ResultSetTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.common; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import software.amazon.documentdb.jdbc.common.helpers.HelperFunctions; import software.amazon.documentdb.jdbc.common.mock.MockConnection; import software.amazon.documentdb.jdbc.common.mock.MockResultSet; import software.amazon.documentdb.jdbc.common.mock.MockStatement; import java.io.InputStream; import java.io.Reader; import java.sql.Blob; import java.sql.Clob; import java.sql.NClob; import java.sql.SQLWarning; import java.util.Map; import java.util.Properties; /** * Test for abstract ResultSet Object. 
 */
public class ResultSetTest {
    private java.sql.ResultSet resultSet;
    private java.sql.Statement statement;

    /**
     * Creates a fresh mock statement and result set before each test.
     */
    @BeforeEach
    void initialize() {
        statement = new MockStatement(new MockConnection(new Properties()));
        resultSet = new MockResultSet(statement);
    }

    /**
     * Verifies that every typed getter overload (by index and by label) is
     * unsupported by the abstract base and throws.
     */
    @Test
    @SuppressWarnings("deprecation")
    void testGetType() {
        HelperFunctions.expectFunctionThrows(() -> resultSet.getArray(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getArray(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getAsciiStream(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getAsciiStream(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBigDecimal(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBigDecimal(0, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBigDecimal(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBigDecimal("", 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBinaryStream(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBinaryStream(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBlob(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBlob(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBoolean(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBoolean(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getByte(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getByte(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBytes(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getBytes(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getCharacterStream(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getCharacterStream(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getClob(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getClob(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getDate(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getDate(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getDate(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getDate("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getDouble(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getDouble(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getFloat(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getFloat(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getInt(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getInt(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getLong(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getLong(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getNCharacterStream(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getNCharacterStream(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getNClob(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getNClob(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getNString(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getNString(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getObject(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getObject(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getObject(0, (Class<?>)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getObject("", (Class<?>)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getObject(0, (Map<String, Class<?>>)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getObject("", (Map<String, Class<?>>)null));
        // NOTE(review): getObject("") is asserted a second time here — it
        // duplicates the check a few lines above; candidate for removal.
        HelperFunctions.expectFunctionThrows(() -> resultSet.getObject(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getRef(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getRef(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getRowId(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getRowId(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getShort(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getShort(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getSQLXML(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getSQLXML(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getString(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getString(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getTime(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getTime(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getTime(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getTime("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getTimestamp(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getTimestamp(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getTimestamp(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getTimestamp("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getUnicodeStream(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getUnicodeStream(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getURL(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.getURL(""));
    }

    /**
     * Verifies that every update* mutator overload is unsupported (the result
     * set is read-only) and throws.
     */
    @Test
    void testUpdate() {
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateArray(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateArray("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateAsciiStream(0, null, (long)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateAsciiStream("", null, (long)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateAsciiStream(0, null, (int)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateAsciiStream("", null, (int)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateAsciiStream(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateAsciiStream("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBigDecimal(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBigDecimal("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBinaryStream(0, null, (long)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBinaryStream("", null, (long)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBinaryStream(0, null, (int)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBinaryStream("", null, (int)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBinaryStream(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBinaryStream("", null));
        // Casts disambiguate the overload (Blob vs. InputStream, etc.).
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBlob(0, (Blob)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBlob("", (Blob)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBlob(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBlob("", null, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBlob(0, (InputStream)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBlob("", (InputStream)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBoolean(0, false));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBoolean("", false));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateByte(0, (byte)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateByte("", (byte)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBytes(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateBytes("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateCharacterStream(0, null, (long)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateCharacterStream("", null, (long)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateCharacterStream(0, null, (int)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateCharacterStream("", null, (int)0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateCharacterStream(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateCharacterStream("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateClob(0, (Clob)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateClob("", (Clob)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateClob(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateClob("", null, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateClob(0, (Reader)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateClob("", (Reader)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateDate(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateDate("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateDouble(0, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateDouble("", 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateFloat(0, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateFloat("", 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateInt(0, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateInt("", 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateLong(0, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateLong("", 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNCharacterStream(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNCharacterStream("", null, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNCharacterStream(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNCharacterStream("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNClob(0, (NClob)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNClob("", (NClob)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNClob(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNClob("", null, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNClob(0, (Reader)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNClob("", (Reader)null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNString(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNString("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNull(0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateNull(""));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateObject(0, null, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateObject(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateObject("", null, 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateObject("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateRef(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateRef("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateRow());
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateRowId(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateRowId("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateSQLXML(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateSQLXML("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateShort(0, (short) 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateShort("", (short) 0));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateString(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateString("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateTime(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateTime("", null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateTimestamp(0, null));
        HelperFunctions.expectFunctionThrows(() -> resultSet.updateTimestamp("", null));
    }

    /**
     * Verifies row-positioning behavior. The mock cursor position is driven
     * directly via MockResultSet.setRowIdx(); assertions below depend on that
     * exact ordering, so the sequence must not be reordered.
     */
    @Test
    void testRow() {
        // Row-change indicators are always false on a read-only result set.
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.rowDeleted(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.rowInserted(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.rowUpdated(), false);
        // Forward relative moves succeed while in range, report false past the end.
        ((MockResultSet)resultSet).setRowIdx(0);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.relative(1), true);
        ((MockResultSet)resultSet).setRowIdx(0);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.relative(10), false);
        // Backward movement and all writable-cursor operations are unsupported.
        HelperFunctions.expectFunctionThrows(() -> resultSet.relative(-1));
        HelperFunctions.expectFunctionThrows(() -> resultSet.moveToCurrentRow());
        HelperFunctions.expectFunctionThrows(() -> resultSet.refreshRow());
        HelperFunctions.expectFunctionThrows(() -> resultSet.previous());
        HelperFunctions.expectFunctionThrows(() -> resultSet.insertRow());
        HelperFunctions.expectFunctionThrows(() -> resultSet.moveToInsertRow());
        HelperFunctions.expectFunctionThrows(() -> resultSet.deleteRow());
        HelperFunctions.expectFunctionThrows(() -> resultSet.cancelRowUpdates());
        // Position predicates at the first row (index 0).
        ((MockResultSet)resultSet).setRowIdx(0);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isAfterLast(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isBeforeFirst(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isFirst(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isLast(), false);
        // Index -1 is before the first row.
        ((MockResultSet)resultSet).setRowIdx(-1);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isBeforeFirst(), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isFirst(), false);
        // Index 9 is the last row; index 10 is after the last row.
        ((MockResultSet)resultSet).setRowIdx(9);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isLast(), true);
        ((MockResultSet)resultSet).setRowIdx(10);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isAfterLast(), true);
        // Scroll operations are unsupported on a forward-only result set.
        ((MockResultSet)resultSet).setRowIdx(0);
        HelperFunctions.expectFunctionThrows(() -> resultSet.first());
        HelperFunctions.expectFunctionThrows(() -> resultSet.last());
        HelperFunctions.expectFunctionThrows(() -> resultSet.beforeFirst());
        HelperFunctions.expectFunctionThrows(() -> resultSet.afterLast());
        // getRow() is 1-based; absolute() rejects non-positive / backward targets.
        ((MockResultSet)resultSet).setRowIdx(0);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getRow(), 1);
        HelperFunctions.expectFunctionThrows(() -> resultSet.absolute(-1));
        HelperFunctions.expectFunctionThrows(() -> resultSet.absolute(0));
        ((MockResultSet)resultSet).setRowIdx(10);
        HelperFunctions.expectFunctionThrows(() -> resultSet.absolute(0));
        ((MockResultSet)resultSet).setRowIdx(0);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.absolute(5), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.absolute(11), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getRow(), 0);
    }

    /**
     * Verifies fetch direction/size handling: only forward fetching and
     * non-negative fetch sizes are accepted.
     */
    @Test
    void testFetch() {
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.setFetchDirection(java.sql.ResultSet.FETCH_FORWARD));
        HelperFunctions.expectFunctionThrows(() -> resultSet.setFetchDirection(java.sql.ResultSet.FETCH_REVERSE));
        HelperFunctions.expectFunctionThrows(() -> resultSet.setFetchDirection(java.sql.ResultSet.FETCH_UNKNOWN));
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getFetchDirection(), java.sql.ResultSet.FETCH_FORWARD);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getType(), java.sql.ResultSet.TYPE_FORWARD_ONLY);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getConcurrency(), java.sql.ResultSet.CONCUR_READ_ONLY);
        HelperFunctions.expectFunctionThrows(() -> resultSet.getCursorName());
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getFetchSize(), 0);
        HelperFunctions.expectFunctionThrows(() -> resultSet.setFetchSize(-1));
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.setFetchSize(0));
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.setFetchSize(1));
    }

    /**
     * Verifies the result set reports the statement that created it.
     */
    @Test
    void testGetStatement() {
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getStatement(), statement);
    }

    /**
     * Verifies the JDBC wrapper contract: unwrapping to the concrete type
     * works, unwrapping to an unrelated type throws.
     */
    @Test
    void testWrap() {
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isWrapperFor(MockResultSet.class), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isWrapperFor(MockStatement.class), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isWrapperFor(null), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.unwrap(MockResultSet.class), resultSet);
        HelperFunctions.expectFunctionThrows(() -> resultSet.unwrap(MockStatement.class));
    }

    /**
     * Verifies close() transitions the result set to closed, that verifyOpen()
     * then throws, and that closing twice is harmless.
     */
    @Test
    void testClosed() {
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isClosed(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.close());
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.isClosed(), true);
        HelperFunctions.expectFunctionThrows(() -> ((ResultSet)resultSet).verifyOpen());
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.close());
    }

    /**
     * Verifies warning accumulation: warnings chain via setNextWarning and
     * clearWarnings() resets the chain to null.
     */
    @Test
    void testWarnings() {
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getWarnings(), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.clearWarnings());
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getWarnings(), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> ((ResultSet)resultSet).addWarning(HelperFunctions.getNewWarning1()));
        final SQLWarning warning = HelperFunctions.getNewWarning1();
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getWarnings(), warning);
        // The expected chain mirrors what addWarning builds internally.
        warning.setNextWarning(HelperFunctions.getNewWarning2());
        HelperFunctions.expectFunctionDoesntThrow(() -> ((ResultSet)resultSet).addWarning(HelperFunctions.getNewWarning2()));
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getWarnings(), warning);
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.clearWarnings());
        HelperFunctions.expectFunctionDoesntThrow(() -> resultSet.getWarnings(), null);
    }
}
4,519
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/ConnectionTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.common; import com.google.common.collect.ImmutableMap; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import software.amazon.documentdb.jdbc.common.helpers.HelperFunctions; import software.amazon.documentdb.jdbc.common.mock.MockConnection; import software.amazon.documentdb.jdbc.common.mock.MockStatement; import software.amazon.documentdb.jdbc.common.utilities.ConnectionProperty; import java.sql.ResultSet; import java.sql.SQLWarning; import java.util.HashMap; import java.util.Map; import java.util.Properties; /** * Test for abstract Connection Object. 
 */
public class ConnectionTest {
    private java.sql.Connection connection;
    // Fixture values; the *_UNSUPPORTED pair triggers a client-info warning.
    private static final String TEST_SCHEMA = "schema";
    private static final String TEST_CATALOG = "catalog";
    private static final String TEST_NATIVE_SQL = "native sql";
    private static final String TEST_PROP_KEY_UNSUPPORTED = "unsupported";
    private static final String TEST_PROP_VAL_UNSUPPORTED = "unsupported";
    private static final String TEST_PROP_KEY = ConnectionProperty.APPLICATION_NAME;
    private static final String TEST_PROP_VAL = Driver.APPLICATION_NAME;
    private static final Properties TEST_PROP = new Properties();
    private static final Properties TEST_PROP_EMPTY = new Properties();
    private static final Map<String, Class<?>> TEST_TYPE_MAP =
            new ImmutableMap.Builder<String, Class<?>>().put("String", String.class).build();

    /**
     * Creates a fresh mock connection and (re)seeds the shared property
     * fixtures before each test. TEST_PROP_EMPTY still carries the
     * application-name entry because the driver always sets it.
     */
    @BeforeEach
    void initialize() {
        connection = new MockConnection(new Properties());
        TEST_PROP.setProperty(TEST_PROP_KEY, TEST_PROP_VAL);
        TEST_PROP.setProperty(ConnectionProperty.APPLICATION_NAME, Driver.APPLICATION_NAME);
        TEST_PROP_EMPTY.setProperty(ConnectionProperty.APPLICATION_NAME, Driver.APPLICATION_NAME);
    }

    /**
     * Verifies that only TRANSACTION_NONE is accepted and that all
     * transaction, savepoint, and holdability operations are unsupported.
     */
    @Test
    void testTransactions() {
        // Transaction isolation.
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setTransactionIsolation(Connection.TRANSACTION_NONE));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getTransactionIsolation(), Connection.TRANSACTION_NONE);
        HelperFunctions.expectFunctionThrows(() -> connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED));
        HelperFunctions.expectFunctionThrows(() -> connection.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED));
        HelperFunctions.expectFunctionThrows(() -> connection.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ));
        HelperFunctions.expectFunctionThrows(() -> connection.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE));
        // Savepoint.
        HelperFunctions.expectFunctionThrows(() -> connection.setSavepoint());
        HelperFunctions.expectFunctionThrows(() -> connection.setSavepoint(null));
        HelperFunctions.expectFunctionThrows(() -> connection.releaseSavepoint(null));
        // Rollback.
        HelperFunctions.expectFunctionThrows(() -> connection.rollback(null));
        HelperFunctions.expectFunctionThrows(() -> connection.rollback());
        // Commit.
        HelperFunctions.expectFunctionThrows(() -> connection.commit());
        // Abort.
        HelperFunctions.expectFunctionThrows(() -> connection.abort(null));
        // Holdability.
        HelperFunctions.expectFunctionThrows(() -> connection.setHoldability(0));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getHoldability(), ResultSet.CLOSE_CURSORS_AT_COMMIT);
    }

    /**
     * Verifies statement creation: plain and type/concurrency variants work,
     * transaction-holdability variants and callable statements do not.
     */
    @Test
    void testStatements() {
        // Statement without transaction.
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.createStatement(), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.createStatement(0, 0), null);
        // Statement with transaction.
        HelperFunctions.expectFunctionThrows(() -> connection.createStatement(0, 0, 0));
        // Prepared statements.
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.prepareStatement(null));
        HelperFunctions.expectFunctionThrows(() -> connection.prepareStatement(null, 0));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.prepareStatement(null, 0, 0));
        HelperFunctions.expectFunctionThrows(() -> connection.prepareStatement(null, 0, 0, 0));
        HelperFunctions.expectFunctionThrows(() -> connection.prepareStatement(null, new int[] {}));
        HelperFunctions.expectFunctionThrows(() -> connection.prepareStatement(null, new String[] {}));
        // Callable statements.
        HelperFunctions.expectFunctionThrows(() -> connection.prepareCall(null));
        HelperFunctions.expectFunctionThrows(() -> connection.prepareCall(null, 0, 0));
        HelperFunctions.expectFunctionThrows(() -> connection.prepareCall(null, 0, 0, 0));
    }

    /**
     * Verifies client-info handling: supported keys round-trip, clearing
     * restores the baseline, unsupported keys raise a warning rather than
     * throwing, and all setters throw once the connection is closed. The
     * sequence is order-dependent — do not reorder.
     */
    @Test
    void testClientInfo() {
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getClientInfo(), TEST_PROP_EMPTY);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getClientInfo(null), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setClientInfo(TEST_PROP_KEY, TEST_PROP_VAL));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getClientInfo(TEST_PROP_KEY), TEST_PROP_VAL);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getClientInfo(null), null);
        // Passing null Properties resets to the baseline client info.
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setClientInfo(null));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getClientInfo(), TEST_PROP_EMPTY);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setClientInfo(TEST_PROP));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getClientInfo(), TEST_PROP);
        // A null key with the key/value overload is a programming error.
        HelperFunctions.expectFunctionThrows(NullPointerException.class, () -> connection.setClientInfo(null, null));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setClientInfo(null));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setClientInfo(TEST_PROP_KEY, null));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getClientInfo(), TEST_PROP_EMPTY);
        // Unsupported keys are ignored but surface as a SQLWarning.
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.clearWarnings());
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setClientInfo(TEST_PROP_KEY_UNSUPPORTED, TEST_PROP_VAL_UNSUPPORTED));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getWarnings(), HelperFunctions.TEST_SQL_WARNING_UNSUPPORTED);
        // After close, client-info setters must throw.
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.close());
        HelperFunctions.expectFunctionThrows(() -> connection.setClientInfo(TEST_PROP_KEY, TEST_PROP_VAL));
        HelperFunctions.expectFunctionThrows(() -> connection.setClientInfo(TEST_PROP));
    }

    /**
     * Verifies that LOB/XML/array/struct factory methods are unsupported.
     */
    @Test
    void testDataTypes() {
        HelperFunctions.expectFunctionThrows(() -> connection.createBlob());
        HelperFunctions.expectFunctionThrows(() -> connection.createClob());
        HelperFunctions.expectFunctionThrows(() -> connection.createNClob());
        HelperFunctions.expectFunctionThrows(() -> connection.createSQLXML());
        HelperFunctions.expectFunctionThrows(() -> connection.createArrayOf(null, new Object[]{}));
        HelperFunctions.expectFunctionThrows(() -> connection.createStruct(null, new Object[]{}));
    }

    /**
     * Verifies setCatalog() is a silent no-op: the catalog stays null.
     */
    @Test
    void testCatalog() {
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getCatalog(), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setCatalog(TEST_CATALOG));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getCatalog(), null);
    }

    /**
     * Verifies getSchema() returns null and setSchema() does not throw.
     * NOTE(review): the two identical getSchema() checks duplicate each other,
     * and the schema is never re-read after setSchema() — unlike testCatalog.
     */
    @Test
    void testSchema() {
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getSchema(), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getSchema(), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setSchema(TEST_SCHEMA));
    }

    /**
     * Verifies warning accumulation: warnings chain via setNextWarning and
     * clearWarnings() resets the chain to null.
     */
    @Test
    void testWarnings() {
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getWarnings(), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.clearWarnings());
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getWarnings(), null);
        HelperFunctions.expectFunctionDoesntThrow(() -> ((Connection)connection).addWarning(HelperFunctions.getNewWarning1()));
        final SQLWarning warning = HelperFunctions.getNewWarning1();
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getWarnings(), warning);
        // The expected chain mirrors what addWarning builds internally.
        warning.setNextWarning(HelperFunctions.getNewWarning2());
        HelperFunctions.expectFunctionDoesntThrow(() -> ((Connection)connection).addWarning(HelperFunctions.getNewWarning2()));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getWarnings(), warning);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.clearWarnings());
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getWarnings(), null);
    }

    /**
     * Verifies the connection is read-only: disabling read-only mode throws.
     */
    @Test
    void testReadOnly() {
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setReadOnly(true));
        HelperFunctions.expectFunctionThrows(() -> connection.setReadOnly(false));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.isReadOnly(), true);
    }

    /**
     * Verifies auto-commit setters are accepted but auto-commit always
     * reports true.
     */
    @Test
    void testAutoCommit() {
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setAutoCommit(true));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setAutoCommit(false));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getAutoCommit(), true);
    }

    /**
     * Verifies close() transitions the connection to closed, that verifyOpen()
     * then throws, and that closing twice is harmless.
     */
    @Test
    void testClosed() {
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.isClosed(), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.close());
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.isClosed(), true);
        HelperFunctions.expectFunctionThrows(() -> ((Connection)connection).verifyOpen());
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.close());
    }

    /**
     * Verifies the JDBC wrapper contract: unwrapping to the concrete type
     * works, unwrapping to an unrelated type throws.
     */
    @Test
    void testWrap() {
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.isWrapperFor(MockConnection.class), true);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.isWrapperFor(MockStatement.class), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.isWrapperFor(null), false);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.unwrap(MockConnection.class), connection);
        HelperFunctions.expectFunctionThrows(() -> connection.unwrap(MockStatement.class));
    }

    /**
     * Verifies the type map round-trips, and that setting null restores an
     * empty map.
     */
    @Test
    void testTypeMap() {
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getTypeMap(), new HashMap<>());
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setTypeMap(TEST_TYPE_MAP));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getTypeMap(), TEST_TYPE_MAP);
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.setTypeMap(null));
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.getTypeMap(), new HashMap<>());
    }

    /**
     * Verifies nativeSQL() passes SQL text through unchanged.
     */
    @Test
    void testNativeSQL() {
        HelperFunctions.expectFunctionDoesntThrow(() -> connection.nativeSQL(TEST_NATIVE_SQL), TEST_NATIVE_SQL);
    }
}
4,520
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/mock/MockStatement.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.mock;

import software.amazon.documentdb.jdbc.common.Statement;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Concrete subclass of the abstract {@code Statement} so the shared statement
 * plumbing can be instantiated and exercised in unit tests.
 */
public class MockStatement extends Statement implements java.sql.Statement {

    // Result set handed back by executeQuery(); seeded by tests via setResultSet().
    private ResultSet resultSet;

    /**
     * Creates a mock statement bound to the given connection.
     *
     * @param connection the parent connection forwarded to the base class.
     */
    public MockStatement(final Connection connection) {
        super(connection);
    }

    /**
     * Seeds the result set that {@link #executeQuery(String)} will return.
     *
     * @param resultSet the result set to return; may be {@code null}.
     */
    public void setResultSet(final ResultSet resultSet) {
        this.resultSet = resultSet;
    }

    @Override
    protected void cancelQuery(final boolean isClosing) throws SQLException {
        // Nothing to cancel in the mock.
    }

    @Override
    public ResultSet executeQuery(final String sql) throws SQLException {
        // The SQL text is ignored; the pre-seeded result set is returned as-is.
        return resultSet;
    }

    @Override
    public int getQueryTimeout() throws SQLException {
        return 0;
    }

    @Override
    public void setQueryTimeout(final int seconds) throws SQLException {
        // Timeout is ignored by the mock.
    }
}
4,521
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/mock/MockPreparedStatement.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.mock;

import software.amazon.documentdb.jdbc.common.PreparedStatement;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

/**
 * Mock implementation for PreparedStatement object so it can be instantiated and tested.
 * All query-related methods are stubs that return {@code null} or {@code 0}.
 */
public class MockPreparedStatement extends PreparedStatement implements java.sql.PreparedStatement {

    /**
     * Constructor for seeding the prepared statement with the parent connection.
     * (Note: this constructor does not throw; the SQL text is stored by the base
     * class and never executed by this mock.)
     *
     * @param connection The parent connection.
     * @param sql The sql query.
     */
    public MockPreparedStatement(final Connection connection, final String sql) {
        super(connection, sql);
    }

    /** Stub: always returns {@code null}; no query is ever executed. */
    @Override
    public ResultSet executeQuery() throws SQLException {
        return null;
    }

    /** Stub: always returns {@code null}. */
    @Override
    public ResultSetMetaData getMetaData() throws SQLException {
        return null;
    }

    // Nothing to cancel in the mock.
    @Override
    protected void cancelQuery(final boolean isClosing) throws SQLException {
    }

    /** Stub: reports no timeout. */
    @Override
    public int getQueryTimeout() throws SQLException {
        return 0;
    }

    // Timeout is ignored by the mock.
    @Override
    public void setQueryTimeout(final int seconds) throws SQLException {
    }
}
4,522
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/mock/MockDriver.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.mock;

import software.amazon.documentdb.jdbc.common.Driver;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;

/**
 * Concrete subclass of the abstract {@code Driver} so the shared driver
 * plumbing can be instantiated and exercised in unit tests.
 */
public class MockDriver extends Driver implements java.sql.Driver {

    @Override
    public Connection connect(final String url, final Properties info) throws SQLException {
        // The mock never establishes a real connection.
        return null;
    }

    @Override
    public boolean acceptsURL(final String url) throws SQLException {
        // The mock accepts no URLs.
        return false;
    }
}
4,523
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/mock/MockDatabaseMetadata.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.mock;

import software.amazon.documentdb.jdbc.common.DatabaseMetaData;
import javax.annotation.Nullable;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Concrete subclass of the abstract {@code DatabaseMetaData} so the shared
 * metadata plumbing can be instantiated and exercised in unit tests.
 *
 * <p>Every accessor is a stub: reference-returning getters yield {@code null}
 * and numeric getters yield {@code 0}.
 */
public class MockDatabaseMetadata extends DatabaseMetaData implements java.sql.DatabaseMetaData {

    /**
     * Creates mock metadata bound to the given connection.
     *
     * @param connection the parent connection forwarded to the base class.
     */
    public MockDatabaseMetadata(final Connection connection) {
        super(connection);
    }

    // --- Simple string/int accessors: all stubbed. ---

    @Override public @Nullable String getURL() throws SQLException { return null; }
    @Override public @Nullable String getUserName() throws SQLException { return null; }
    @Override public @Nullable String getDatabaseProductName() throws SQLException { return null; }
    @Override public @Nullable String getDatabaseProductVersion() throws SQLException { return null; }
    @Override public @Nullable String getDriverName() throws SQLException { return null; }
    @Override public @Nullable String getSQLKeywords() throws SQLException { return null; }
    @Override public @Nullable String getNumericFunctions() throws SQLException { return null; }
    @Override public @Nullable String getStringFunctions() throws SQLException { return null; }
    @Override public @Nullable String getSystemFunctions() throws SQLException { return null; }
    @Override public @Nullable String getTimeDateFunctions() throws SQLException { return null; }
    @Override public @Nullable String getSearchStringEscape() throws SQLException { return null; }
    @Override public @Nullable String getExtraNameCharacters() throws SQLException { return null; }
    @Override public @Nullable String getCatalogTerm() throws SQLException { return null; }
    @Override public @Nullable String getCatalogSeparator() throws SQLException { return null; }
    @Override public int getMaxRowSize() throws SQLException { return 0; }
    @Override public int getDatabaseMajorVersion() throws SQLException { return 0; }
    @Override public int getDatabaseMinorVersion() throws SQLException { return 0; }
    @Override public int getJDBCMajorVersion() throws SQLException { return 0; }
    @Override public int getJDBCMinorVersion() throws SQLException { return 0; }

    // --- ResultSet-producing catalog queries: all stubbed to null. ---

    @Override
    public @Nullable ResultSet getProcedures(final String catalog, final String schemaPattern,
            final String procedureNamePattern) throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getTables(final String catalog, final String schemaPattern,
            final String tableNamePattern, final String[] types) throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getSchemas() throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getCatalogs() throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getTableTypes() throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getColumns(final String catalog, final String schemaPattern,
            final String tableNamePattern, final String columnNamePattern) throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getColumnPrivileges(final String catalog, final String schema,
            final String table, final String columnNamePattern) throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getBestRowIdentifier(final String catalog, final String schema,
            final String table, final int scope, final boolean nullable) throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getPrimaryKeys(final String catalog, final String schema,
            final String table) throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getImportedKeys(final String catalog, final String schema,
            final String table) throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getTypeInfo() throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getIndexInfo(final String catalog, final String schema,
            final String table, final boolean unique, final boolean approximate)
            throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getAttributes(final String catalog, final String schemaPattern,
            final String typeNamePattern, final String attributeNamePattern) throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getSchemas(final String catalog, final String schemaPattern)
            throws SQLException {
        return null;
    }

    @Override
    public @Nullable ResultSet getClientInfoProperties() throws SQLException {
        return null;
    }
}
4,524
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/mock/MockConnection.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.mock;

import org.checkerframework.checker.nullness.qual.NonNull;
import software.amazon.documentdb.jdbc.common.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.Executor;

/**
 * Concrete subclass of the abstract {@code Connection} so the shared
 * connection plumbing can be instantiated and exercised in unit tests.
 */
public class MockConnection extends Connection implements java.sql.Connection {

    /**
     * Creates a mock connection from the supplied properties.
     *
     * @param connectionProperties properties forwarded to the base class; must not be null.
     */
    public MockConnection(final @NonNull Properties connectionProperties) {
        super(connectionProperties);
    }

    @Override
    protected void doClose() {
        // Nothing to release in the mock.
    }

    @Override
    public DatabaseMetaData getMetaData() throws SQLException {
        return null;
    }

    @Override
    public Statement createStatement(final int resultSetType, final int resultSetConcurrency)
            throws SQLException {
        return null;
    }

    @Override
    public PreparedStatement prepareStatement(final String sql, final int resultSetType,
            final int resultSetConcurrency) throws SQLException {
        return null;
    }

    @Override
    public boolean isValid(final int timeout) throws SQLException {
        // The mock never reports itself as a valid live connection.
        return false;
    }

    @Override
    public void setNetworkTimeout(final Executor executor, final int milliseconds)
            throws SQLException {
        // Timeout is ignored by the mock.
    }

    @Override
    public int getNetworkTimeout() throws SQLException {
        return 0;
    }

    @Override
    public boolean isSupportedProperty(final String name) {
        // A property is supported when any MockConnectionProperty carries the same name.
        for (final MockConnectionProperty property : MockConnectionProperty.values()) {
            if (property.getName().equals(name)) {
                return true;
            }
        }
        return false;
    }
}
4,525
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/mock/MockPooledConnection.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.mock;

import software.amazon.documentdb.jdbc.common.PooledConnection;
import java.sql.Connection;
import java.sql.SQLException;

/**
 * Concrete subclass of the abstract {@code PooledConnection} so the shared
 * pooled-connection plumbing can be instantiated and exercised in unit tests.
 */
public class MockPooledConnection extends PooledConnection implements javax.sql.PooledConnection {

    /**
     * Creates a mock pooled connection wrapping the given connection.
     *
     * @param connection the connection forwarded to the base class.
     */
    public MockPooledConnection(final Connection connection) {
        super(connection);
    }

    @Override
    public Connection getConnection() throws SQLException {
        // The mock never hands out a usable connection.
        return null;
    }
}
4,526
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/mock/MockResultSet.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.mock;

import software.amazon.documentdb.jdbc.common.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Concrete subclass of the abstract {@code ResultSet} so the shared
 * result-set plumbing can be instantiated and exercised in unit tests.
 * The mock simulates a fixed-size cursor of {@value #ROW_COUNT} rows.
 */
public class MockResultSet extends ResultSet implements java.sql.ResultSet {

    // Fixed number of rows the simulated cursor exposes.
    private static final int ROW_COUNT = 10;

    // Current cursor position; saturates at ROW_COUNT once exhausted.
    private int rowIndex = 0;

    /**
     * Creates a mock result set owned by the given statement.
     *
     * @param statement the parent statement forwarded to the base class.
     */
    public MockResultSet(final Statement statement) {
        super(statement);
    }

    @Override
    protected void doClose() throws SQLException {
        // Nothing to release in the mock.
    }

    @Override
    protected int getDriverFetchSize() throws SQLException {
        return 0;
    }

    @Override
    protected void setDriverFetchSize(final int rows) {
        // Fetch size is ignored by the mock.
    }

    @Override
    protected int getRowIndex() {
        return rowIndex;
    }

    @Override
    protected int getRowCount() {
        return ROW_COUNT;
    }

    @Override
    public boolean next() throws SQLException {
        // Advance, clamping at ROW_COUNT so repeated calls past the end stay put.
        rowIndex = Math.min(rowIndex + 1, ROW_COUNT);
        return rowIndex < ROW_COUNT;
    }

    @Override
    public boolean wasNull() throws SQLException {
        return false;
    }

    @Override
    public ResultSetMetaData getMetaData() throws SQLException {
        return null;
    }

    @Override
    public int findColumn(final String columnLabel) throws SQLException {
        return 0;
    }

    /**
     * Moves the simulated cursor directly to the given index (test hook).
     *
     * @param idx the row index to position at.
     */
    public void setRowIdx(final int idx) {
        this.rowIndex = idx;
    }
}
4,527
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/mock/MockConnectionProperty.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.mock;

import software.amazon.documentdb.jdbc.common.utilities.ConnectionProperty;

/**
 * Minimal {@link ConnectionProperty} enum used by the mock connection to
 * exercise property-support checks in unit tests.
 */
public enum MockConnectionProperty implements ConnectionProperty {
    APPLICATION_NAME(ConnectionProperty.APPLICATION_NAME, "", "Name of the application");

    // Property key as it appears in a connection string.
    private final String propertyName;
    // Value used when the property is not supplied.
    private final String propertyDefault;
    // Human-readable explanation of the property.
    private final String propertyDescription;

    MockConnectionProperty(final String connectionProperty, final String defaultValue,
            final String description) {
        this.propertyName = connectionProperty;
        this.propertyDefault = defaultValue;
        this.propertyDescription = description;
    }

    @Override
    public String getName() {
        return propertyName;
    }

    @Override
    public String getDefaultValue() {
        return propertyDefault;
    }

    @Override
    public String getDescription() {
        return propertyDescription;
    }
}
4,528
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/mock/MockDataSource.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.mock;

import software.amazon.documentdb.jdbc.common.DataSource;
import javax.sql.PooledConnection;
import java.sql.Connection;
import java.sql.SQLException;

/**
 * Concrete subclass of the abstract {@code DataSource} so the shared
 * data-source plumbing can be instantiated and exercised in unit tests.
 * Every factory method is a stub returning {@code null}.
 */
public class MockDataSource extends DataSource implements javax.sql.DataSource {

    @Override
    public PooledConnection getPooledConnection() throws SQLException {
        return null;
    }

    @Override
    public PooledConnection getPooledConnection(final String user, final String password)
            throws SQLException {
        return null;
    }

    @Override
    public Connection getConnection() throws SQLException {
        return null;
    }

    @Override
    public Connection getConnection(final String username, final String password)
            throws SQLException {
        return null;
    }

    @Override
    public void setLoginTimeout(final int seconds) throws SQLException {
        // Login timeout is ignored by the mock.
    }

    @Override
    public int getLoginTimeout() throws SQLException {
        return 0;
    }
}
4,529
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/mock/MockResultSetMetaData.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.mock;

import software.amazon.documentdb.jdbc.common.ResultSetMetaData;
import java.sql.SQLException;

/**
 * Concrete subclass of the abstract {@code ResultSetMetaData} so the shared
 * metadata plumbing can be instantiated and exercised in unit tests.
 *
 * <p>Every accessor is a stub: booleans return {@code false}, numbers return
 * {@code 0}, and strings return {@code null}.
 */
public class MockResultSetMetaData extends ResultSetMetaData implements java.sql.ResultSetMetaData {

    @Override public int getColumnCount() throws SQLException { return 0; }
    @Override public boolean isAutoIncrement(final int column) throws SQLException { return false; }
    @Override public boolean isCaseSensitive(final int column) throws SQLException { return false; }
    @Override public boolean isSearchable(final int column) throws SQLException { return false; }
    @Override public boolean isCurrency(final int column) throws SQLException { return false; }
    @Override public int isNullable(final int column) throws SQLException { return 0; }
    @Override public boolean isSigned(final int column) throws SQLException { return false; }
    @Override public int getColumnDisplaySize(final int column) throws SQLException { return 0; }
    @Override public String getColumnLabel(final int column) throws SQLException { return null; }
    @Override public String getColumnName(final int column) throws SQLException { return null; }
    @Override public String getSchemaName(final int column) throws SQLException { return null; }
    @Override public int getPrecision(final int column) throws SQLException { return 0; }
    @Override public int getScale(final int column) throws SQLException { return 0; }
    @Override public String getTableName(final int column) throws SQLException { return null; }
    @Override public String getCatalogName(final int column) throws SQLException { return null; }
    @Override public int getColumnType(final int column) throws SQLException { return 0; }
    @Override public String getColumnTypeName(final int column) throws SQLException { return null; }
    @Override public boolean isReadOnly(final int column) throws SQLException { return false; }
    @Override public boolean isWritable(final int column) throws SQLException { return false; }
    @Override public boolean isDefinitelyWritable(final int column) throws SQLException { return false; }
    @Override public String getColumnClassName(final int column) throws SQLException { return null; }
}
4,530
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/utilities/LazyLinkedHashMapTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.utilities;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map.Entry;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Unit tests for {@code LazyLinkedHashMap}: verifies that values are loaded
 * lazily (at most once per key), that mutating operations are unsupported, and
 * that the optional bulk-loader factory materializes all remaining values.
 */
class LazyLinkedHashMapTest {

    /** Builds a lazy map whose values are parsed lazily from the given keys. */
    private static LazyLinkedHashMap<String, Integer> newLazyMap(
            final LinkedHashSet<String> keys) {
        return new LazyLinkedHashMap<>(keys, Integer::parseInt);
    }

    /**
     * Builds a lazy map with a bulk-loader factory that materializes all
     * remaining values at once (used by values()/entrySet()).
     */
    private static LazyLinkedHashMap<String, Integer> newBulkLoadingLazyMap(
            final LinkedHashSet<String> keys) {
        return new LazyLinkedHashMap<>(
                keys,
                Integer::parseInt,
                set -> set.stream().collect(Collectors.toMap(
                        Function.identity(),
                        Integer::parseInt,
                        (existing, duplicate) -> existing,
                        LinkedHashMap::new)));
    }

    @DisplayName("Test the size of the map - which should be equal to the number in the keySet.")
    @Test
    void testSize() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertEquals(keySet.size(), map.size());
        // size() must not trigger any lazy loads.
        Assertions.assertEquals(0, map.getLazyMapSize());
    }

    @DisplayName("Test whether map is empty or not - which should be equal to the number in the keySet.")
    @Test
    void testIsEmpty() {
        LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertFalse(map.isEmpty());
        Assertions.assertEquals(0, map.getLazyMapSize());
        keySet = new LinkedHashSet<>();
        map = newLazyMap(keySet);
        Assertions.assertTrue(map.isEmpty());
        Assertions.assertEquals(0, map.getLazyMapSize());
    }

    @DisplayName("Test whether map contains a key.")
    @Test
    void testContainsKey() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertFalse(map.containsKey("3"));
        Assertions.assertTrue(map.containsKey("1"));
        Assertions.assertTrue(map.containsKey("2"));
        // containsKey() must not trigger any lazy loads.
        Assertions.assertEquals(0, map.getLazyMapSize());
    }

    @DisplayName("Test the map contains a value - which is not supported.")
    @Test
    void testContainsValue() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        Assertions.assertThrows(UnsupportedOperationException.class,
                () -> map.containsValue(1));
        Assertions.assertEquals(0, map.getLazyMapSize());
    }

    @DisplayName("Test getting a specific value from the map - which will lazy load each value only once.")
    @Test
    void testGet() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        // Unknown key: no value and no load.
        Assertions.assertNull(map.get("3"));
        Assertions.assertEquals(0, map.getLazyMapSize());
        // First access of each key loads exactly one value.
        Assertions.assertEquals(1, map.get("1"));
        Assertions.assertEquals(1, map.getLazyMapSize());
        Assertions.assertEquals(2, map.get("2"));
        Assertions.assertEquals(2, map.getLazyMapSize());
        // Repeat accesses are served from the cache.
        Assertions.assertEquals(1, map.get("1"));
        Assertions.assertEquals(2, map.getLazyMapSize());
        Assertions.assertEquals(2, map.get("2"));
        Assertions.assertEquals(2, map.getLazyMapSize());
    }

    @DisplayName("Test putting a value in the map - which is not supported.")
    @Test
    void testPut() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        Assertions.assertThrows(UnsupportedOperationException.class, () -> map.put("3", 3));
        Assertions.assertEquals(0, map.getLazyMapSize());
    }

    @DisplayName("Test removing a value in the map - which is not supported.")
    @Test
    void testRemove() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        Assertions.assertThrows(UnsupportedOperationException.class, () -> map.remove("2"));
        Assertions.assertEquals(0, map.getLazyMapSize());
    }

    @DisplayName("Test putting all values in the map - which is not supported.")
    @Test
    void testPutAll() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        Assertions.assertThrows(UnsupportedOperationException.class,
                () -> map.putAll(new LinkedHashMap<>()));
        Assertions.assertEquals(0, map.getLazyMapSize());
    }

    @DisplayName("Test clearing the map - which is not supported.")
    @Test
    void testClear() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        Assertions.assertEquals(2, map.size());
        Assertions.assertThrows(UnsupportedOperationException.class, () -> map.clear());
        // The failed clear must leave both maps untouched.
        Assertions.assertEquals(0, map.getLazyMapSize());
        Assertions.assertEquals(2, map.size());
    }

    @DisplayName("Test retrieving the keySet from map - does not retrieve any values.")
    @Test
    void testKeySet() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        Assertions.assertArrayEquals(keySet.toArray(), map.keySet().toArray(new String[0]));
        Assertions.assertEquals(2, map.keySet().size());
        Assertions.assertEquals(0, map.getLazyMapSize());
    }

    @DisplayName("Test getting all values in the map - which is not supported.")
    @Test
    void testValues() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        // Without a bulk-loader factory, values() is unsupported.
        Assertions.assertThrows(UnsupportedOperationException.class, () -> map.values());
        Assertions.assertEquals(0, map.getLazyMapSize());
    }

    @DisplayName("Test getting all entries (key/value) in the map - which is not supported.")
    @Test
    void testEntrySet() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2"));
        final LazyLinkedHashMap<String, Integer> map = newLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        // Without a bulk-loader factory, entrySet() is unsupported.
        Assertions.assertThrows(UnsupportedOperationException.class, () -> map.entrySet());
        Assertions.assertEquals(0, map.getLazyMapSize());
    }

    @DisplayName("Tests all values using the factory function.")
    @Test
    void testValuesWithAllValuesFactory() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2", "3"));
        final LazyLinkedHashMap<String, Integer> map = newBulkLoadingLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        // values() triggers the bulk loader for every key.
        final Collection<Integer> values = map.values();
        Assertions.assertEquals(keySet.size(), map.getLazyMapSize());
        Assertions.assertEquals(keySet.size(), values.size());
    }

    @DisplayName("Tests using the partial remaining values factory function.")
    @Test
    void testValuesWithAllValuesFactoryPartial() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2", "3"));
        final LazyLinkedHashMap<String, Integer> map = newBulkLoadingLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        // Load one key eagerly, then let values() bulk-load the remainder.
        Assertions.assertEquals(1, map.get("1"));
        Assertions.assertEquals(1, map.getLazyMapSize());
        final Collection<Integer> values = map.values();
        Assertions.assertEquals(keySet.size(), map.getLazyMapSize());
        Assertions.assertEquals(keySet.size(), values.size());
    }

    @DisplayName("Tests the entry set with all the remaining values factory function.")
    @Test
    void testEntrySetWithAllValuesFactory() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2", "3"));
        final LazyLinkedHashMap<String, Integer> map = newBulkLoadingLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        // entrySet() triggers the bulk loader for every key.
        final Set<Entry<String, Integer>> entries = map.entrySet();
        Assertions.assertEquals(keySet.size(), map.getLazyMapSize());
        Assertions.assertEquals(keySet.size(), entries.size());
    }

    @DisplayName("Tests the entry set with partial remaining values factory function.")
    @Test
    void testEntrySetWithAllValuesFactoryPartial() {
        final LinkedHashSet<String> keySet = new LinkedHashSet<>(Arrays.asList("1", "2", "3"));
        final LazyLinkedHashMap<String, Integer> map = newBulkLoadingLazyMap(keySet);
        Assertions.assertEquals(0, map.getLazyMapSize());
        // Load one key eagerly, then let entrySet() bulk-load the remainder.
        Assertions.assertEquals(1, map.get("1"));
        Assertions.assertEquals(1, map.getLazyMapSize());
        final Set<Entry<String, Integer>> entries = map.entrySet();
        Assertions.assertEquals(keySet.size(), map.getLazyMapSize());
        Assertions.assertEquals(keySet.size(), entries.size());
    }
}
4,531
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/common/helpers/HelperFunctions.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.common.helpers;

import org.junit.jupiter.api.Assertions;
import software.amazon.documentdb.jdbc.common.utilities.Warning;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Assertion helpers shared by the driver's unit tests: thin wrappers around
 * JUnit assertions for operations that may throw {@link SQLException}, plus
 * canned {@link SQLWarning} fixtures.
 */
public class HelperFunctions {
    public static final String TEST_WARNING_REASON_1 = "warning_1";
    public static final String TEST_WARNING_REASON_2 = "warning_2";
    public static final String TEST_WARNING_UNSUPPORTED = "unsupported";
    public static final String TEST_WARNING_STATE = "state";
    public static final SQLWarning TEST_SQL_WARNING_UNSUPPORTED =
            new SQLWarning(Warning.lookup(Warning.UNSUPPORTED_PROPERTY, TEST_WARNING_UNSUPPORTED));

    /**
     * Value-returning callback that may throw {@link SQLException}.
     *
     * @param <R> the type produced by {@link #function()}.
     */
    public interface VerifyValueInterface<R> {
        /**
         * Executes the operation under test.
         *
         * @return the operation's result.
         * @throws SQLException if the operation fails.
         */
        R function() throws SQLException;
    }

    /**
     * Void callback that may throw {@link SQLException}.
     */
    public interface VerifyThrowInterface {
        /**
         * Executes the operation under test.
         *
         * @throws SQLException if the operation fails.
         */
        void function() throws SQLException;
    }

    /**
     * Asserts that the given operation throws a {@link SQLException}.
     *
     * @param func the operation under test.
     */
    public static void expectFunctionThrows(final VerifyThrowInterface func) {
        Assertions.assertThrows(SQLException.class, func::function);
    }

    /**
     * Asserts that the given operation throws the given throwable type.
     *
     * @param type the expected throwable class.
     * @param func the operation under test.
     * @param <T>  the expected {@link Throwable} type.
     */
    public static <T extends Throwable> void expectFunctionThrows(
            final Class<T> type, final VerifyThrowInterface func) {
        Assertions.assertThrows(type, func::function);
    }

    /**
     * Asserts that the given operation completes without throwing.
     *
     * @param func the operation under test.
     */
    public static void expectFunctionDoesntThrow(final VerifyThrowInterface func) {
        Assertions.assertDoesNotThrow(func::function);
    }

    /**
     * Asserts that the given operation completes without throwing and produces the
     * expected value. When the result is a {@link SQLWarning}, the chained warnings
     * are compared message-by-message instead of by object equality.
     *
     * @param func     the operation under test.
     * @param expected the expected result (or the head of the expected warning chain).
     */
    public static void expectFunctionDoesntThrow(
            final VerifyValueInterface<?> func, final Object expected) {
        final AtomicReference<Object> result = new AtomicReference<>();
        Assertions.assertDoesNotThrow(() -> result.set(func.function()));
        if (!(result.get() instanceof SQLWarning)) {
            Assertions.assertEquals(expected, result.get());
            return;
        }
        SQLWarning actualWarning = (SQLWarning) result.get();
        SQLWarning expectedWarning = (SQLWarning) expected;
        // Walk both chains in lock-step, comparing messages. Iteration is bounded by
        // the expected chain only: per the original note, the actual chain's end can
        // point back to itself, so it cannot be walked to a null terminator safely.
        while (true) {
            Assertions.assertNotNull(actualWarning);
            Assertions.assertEquals(expectedWarning.getMessage(), actualWarning.getMessage());
            actualWarning = actualWarning.getNextWarning();
            expectedWarning = expectedWarning.getNextWarning();
            if (expectedWarning == null) {
                break;
            }
        }
    }

    /** @return a fresh warning with reason {@value #TEST_WARNING_REASON_1}. */
    public static SQLWarning getNewWarning1() {
        return new SQLWarning(TEST_WARNING_REASON_1, TEST_WARNING_STATE);
    }

    /** @return a fresh warning with reason {@value #TEST_WARNING_REASON_2}. */
    public static SQLWarning getNewWarning2() {
        return new SQLWarning(TEST_WARNING_REASON_2, TEST_WARNING_STATE);
    }
}
4,532
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query/DocumentDbQueryMappingServiceLiteralTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.query;

import org.bson.BsonDateTime;
import org.bson.BsonDocument;
import org.bson.BsonObjectId;
import org.bson.BsonString;
import org.bson.types.ObjectId;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension;

import java.sql.SQLException;
import java.time.Instant;

/**
 * Verifies SQL-to-MQL translation of literal values: ObjectId comparisons in WHERE
 * and SELECT clauses, and boolean/numeric/string/binary/date-time/interval literals.
 */
@ExtendWith(DocumentDbFlapDoodleExtension.class)
public class DocumentDbQueryMappingServiceLiteralTest extends DocumentDbQueryMappingServiceTest {
    private static final String OBJECT_ID_COLLECTION_NAME = "objectIdCollection";
    private static final BsonObjectId BSON_OBJECT_ID = new BsonObjectId(
            new ObjectId("123456789012345678901234"));
    private static DocumentDbQueryMappingService queryMapper;

    /** Seeds the test collection with a single ObjectId-keyed document. */
    @BeforeAll
    void initialize() throws SQLException {
        final long dateTime = Instant.parse("2020-01-01T00:00:00.00Z").toEpochMilli();
        // NOTE(review): doc1 is built but never inserted — possibly intended to be
        // included in insertBsonDocuments below; left as-is pending confirmation.
        final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101}");
        doc1.append("field", new BsonDateTime(dateTime));
        final BsonDocument objectIdDocument = new BsonDocument("_id", BSON_OBJECT_ID)
                .append("field", new BsonString("value"))
                .append("dateField", new BsonDateTime(dateTime));
        insertBsonDocuments(OBJECT_ID_COLLECTION_NAME, new BsonDocument[]{objectIdDocument});
        queryMapper = getQueryMappingService();
    }

    @Test
    @DisplayName("Tests that querying for ObjectId type.")
    void testWhereQueryForObjectId() throws SQLException {
        // A 24-hex-digit string literal compared to _id must match either an ObjectId
        // or the plain string — hence the $or in the expected $match stage.
        final String query1 = String.format(
                "SELECT %2$s__id FROM %1$s.%2$s WHERE %2$s__id = '%3$s'",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME,
                BSON_OBJECT_ID.getValue().toHexString());
        final DocumentDbMqlQueryContext result1 = queryMapper.get(query1);
        Assertions.assertNotNull(result1);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result1.getCollectionName());
        Assertions.assertEquals(1, result1.getColumnMetaData().size());
        Assertions.assertEquals(2, result1.getAggregateOperations().size());
        Assertions.assertEquals(BsonDocument.parse(
                "{\"$project\": {\"objectIdCollection__id\": \"$_id\", \"_id\": 0}}"),
                result1.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"$or\": ["
                                + "{\"objectIdCollection__id\": {\"$eq\": {\"$oid\": \"123456789012345678901234\"}}}, "
                                + "{\"objectIdCollection__id\": {\"$eq\": \"123456789012345678901234\"}}]}}"),
                result1.getAggregateOperations().get(1));

        // In-memory substring and concatenation.
        final String query2 = String.format(
                "SELECT %2$s__id FROM %1$s.%2$s WHERE %2$s__id ="
                        + " CONCAT(SUBSTRING('%3$s', 1, 10), SUBSTRING('%3$s', 11))",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME,
                BSON_OBJECT_ID.getValue().toHexString());
        final DocumentDbMqlQueryContext result2 = queryMapper.get(query2);
        Assertions.assertNotNull(result2);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result2.getCollectionName());
        Assertions.assertEquals(1, result2.getColumnMetaData().size());
        Assertions.assertEquals(2, result2.getAggregateOperations().size());
        // FIX(review): the two assertions below previously checked result1 again
        // (copy/paste), leaving result2's pipeline unverified.
        Assertions.assertEquals(BsonDocument.parse(
                "{\"$project\": {\"objectIdCollection__id\": \"$_id\", \"_id\": 0}}"),
                result2.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"$or\": ["
                                + "{\"objectIdCollection__id\": {\"$eq\": {\"$oid\": \"123456789012345678901234\"}}}, "
                                + "{\"objectIdCollection__id\": {\"$eq\": \"123456789012345678901234\"}}]}}"),
                result2.getAggregateOperations().get(1));

        // Hex string
        final String query3 = String.format(
                "SELECT %2$s__id FROM %1$s.%2$s WHERE %2$s__id ="
                        + " x'%3$s'",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME,
                BSON_OBJECT_ID.getValue().toHexString());
        final DocumentDbMqlQueryContext result3 = queryMapper.get(query3);
        Assertions.assertNotNull(result3);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result3.getCollectionName());
        Assertions.assertEquals(1, result3.getColumnMetaData().size());
        Assertions.assertEquals(2, result3.getAggregateOperations().size());
        // FIX(review): previously asserted result1's projection (copy/paste).
        Assertions.assertEquals(BsonDocument.parse(
                "{\"$project\": {\"objectIdCollection__id\": \"$_id\", \"_id\": 0}}"),
                result3.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"$or\": ["
                                + "{\"objectIdCollection__id\": {\"$eq\": {\"$oid\": \"123456789012345678901234\"}}}, "
                                + "{\"objectIdCollection__id\": {\"$eq\": {\"$binary\": {\"base64\": \"EjRWeJASNFZ4kBI0\", \"subType\": \"00\"}}}}]}}"),
                result3.getAggregateOperations().get(1));

        // String that cannot be an ObjectId — no $or, plain string equality.
        final String query4 = String.format(
                "SELECT %2$s__id FROM %1$s.%2$s WHERE %2$s__id ="
                        + " 'arbitrary string'",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result4 = queryMapper.get(query4);
        Assertions.assertNotNull(result4);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result4.getCollectionName());
        Assertions.assertEquals(1, result4.getColumnMetaData().size());
        Assertions.assertEquals(2, result4.getAggregateOperations().size());
        Assertions.assertEquals(BsonDocument.parse(
                "{\"$project\": {\"objectIdCollection__id\": \"$_id\", \"_id\": 0}}"),
                result4.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"objectIdCollection__id\": {\"$eq\": \"arbitrary string\"}}}"),
                result4.getAggregateOperations().get(1));

        // Long integer
        final String query5 = String.format(
                "SELECT %2$s__id FROM %1$s.%2$s WHERE %2$s__id ="
                        + " 4223372036854775807",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result5 = queryMapper.get(query5);
        Assertions.assertNotNull(result5);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result5.getCollectionName());
        Assertions.assertEquals(1, result5.getColumnMetaData().size());
        Assertions.assertEquals(2, result5.getAggregateOperations().size());
        Assertions.assertEquals(BsonDocument.parse(
                "{\"$project\": {\"objectIdCollection__id\": \"$_id\", \"_id\": 0}}"),
                result5.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"objectIdCollection__id\": {\"$eq\": 4223372036854775807}}}"),
                result5.getAggregateOperations().get(1));

        // Byte array (not 12 bytes, so not a candidate ObjectId — plain binary equality).
        final String query6 = String.format(
                "SELECT %2$s__id FROM %1$s.%2$s WHERE %2$s__id ="
                        + " x'0123456789abcdef'",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result6 = queryMapper.get(query6);
        Assertions.assertNotNull(result6);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result6.getCollectionName());
        Assertions.assertEquals(1, result6.getColumnMetaData().size());
        Assertions.assertEquals(2, result6.getAggregateOperations().size());
        // FIX(review): previously asserted result1's projection (copy/paste).
        Assertions.assertEquals(BsonDocument.parse(
                "{\"$project\": {\"objectIdCollection__id\": \"$_id\", \"_id\": 0}}"),
                result6.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"objectIdCollection__id\": {\"$eq\": {\"$binary\": {\"base64\": \"ASNFZ4mrze8=\", \"subType\": \"00\"}}}}}"),
                result6.getAggregateOperations().get(1));
    }

    @Test
    @DisplayName("Tests querying for ObjectId type in SELECT clause.")
    void testSelectQueryForObjectId() throws SQLException {
        // Comparison in the projection list becomes a null-propagating $cond expression.
        final String query1 = String.format(
                "SELECT %2$s__id = '%3$s' FROM %1$s.%2$s",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME,
                BSON_OBJECT_ID.getValue().toHexString());
        final DocumentDbMqlQueryContext result1 = queryMapper.get(query1);
        Assertions.assertNotNull(result1);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result1.getCollectionName());
        Assertions.assertEquals(1, result1.getColumnMetaData().size());
        Assertions.assertEquals(1, result1.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"EXPR$0\": {\"$cond\": [{"
                                + "\"$and\": ["
                                + "{\"$gt\": [\"$_id\", null]}, "
                                + "{\"$gt\": [{\"$literal\": \"123456789012345678901234\"}, null]}]}, "
                                + "{\"$or\": ["
                                + "{\"$eq\": [\"$_id\", {\"$oid\": \"123456789012345678901234\"}]}, "
                                + "{\"$eq\": [\"$_id\", {\"$literal\": \"123456789012345678901234\"}]}]}, null]}, "
                                + "\"_id\": 0}}"),
                result1.getAggregateOperations().get(0));

        // Hex string
        final String query2 = String.format(
                "SELECT %2$s__id =x'%3$s' FROM %1$s.%2$s",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME,
                BSON_OBJECT_ID.getValue().toHexString());
        final DocumentDbMqlQueryContext result2 = queryMapper.get(query2);
        Assertions.assertNotNull(result2);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result2.getCollectionName());
        Assertions.assertEquals(1, result2.getColumnMetaData().size());
        Assertions.assertEquals(1, result2.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"EXPR$0\": {\"$cond\": ["
                                + "{\"$and\": ["
                                + "{\"$gt\": [\"$_id\", null]}, "
                                + "{\"$gt\": [{\"$binary\": {\"base64\": \"EjRWeJASNFZ4kBI0\", \"subType\": \"00\"}}, null]}]}, "
                                + "{\"$or\": ["
                                + "{\"$eq\": [\"$_id\", {\"$oid\": \"123456789012345678901234\"}]}, "
                                + "{\"$eq\": [\"$_id\", {\"$binary\": {\"base64\": \"EjRWeJASNFZ4kBI0\", \"subType\": \"00\"}}]}]}, null]}, "
                                + "\"_id\": 0}}"),
                result2.getAggregateOperations().get(0));

        // String
        final String query3 = String.format(
                "SELECT %2$s__id = 'arbitrary string' FROM %1$s.%2$s",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result3 = queryMapper.get(query3);
        Assertions.assertNotNull(result3);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result3.getCollectionName());
        Assertions.assertEquals(1, result3.getColumnMetaData().size());
        Assertions.assertEquals(1, result3.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"EXPR$0\": {\"$cond\": [{\"$and\": ["
                                + "{\"$gt\": [\"$_id\", null]}, "
                                + "{\"$gt\": [{\"$literal\": \"arbitrary string\"}, null]}]}, "
                                + "{\"$eq\": [\"$_id\", {\"$literal\": \"arbitrary string\"}]}, null]}, "
                                + "\"_id\": 0}}"),
                result3.getAggregateOperations().get(0));

        // Long integer
        final String query4 = String.format(
                "SELECT %2$s__id = 4223372036854775807 FROM %1$s.%2$s",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result4 = queryMapper.get(query4);
        Assertions.assertNotNull(result4);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result4.getCollectionName());
        Assertions.assertEquals(1, result4.getColumnMetaData().size());
        Assertions.assertEquals(1, result4.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"EXPR$0\": {\"$cond\": ["
                                + "{\"$and\": ["
                                + "{\"$gt\": [\"$_id\", null]}, "
                                + "{\"$gt\": [{\"$literal\": {\"$numberLong\": \"4223372036854775807\"}}, null]}]}, "
                                + "{\"$eq\": [\"$_id\", {\"$literal\": {\"$numberLong\": \"4223372036854775807\"}}]}, null]}, \"_id\": 0}}"),
                result4.getAggregateOperations().get(0));

        // Byte array
        final String query5 = String.format(
                "SELECT %2$s__id = x'0123456789abcdef' FROM %1$s.%2$s",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result5 = queryMapper.get(query5);
        Assertions.assertNotNull(result5);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result5.getCollectionName());
        Assertions.assertEquals(1, result5.getColumnMetaData().size());
        Assertions.assertEquals(1, result5.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"EXPR$0\": {\"$cond\": ["
                                + "{\"$and\": ["
                                + "{\"$gt\": [\"$_id\", null]}, "
                                + "{\"$gt\": [{\"$binary\": {\"base64\": \"ASNFZ4mrze8=\", \"subType\": \"00\"}}, null]}]}, "
                                + "{\"$eq\": [\"$_id\", {\"$binary\": {\"base64\": \"ASNFZ4mrze8=\", \"subType\": \"00\"}}]}, null]}, "
                                + "\"_id\": 0}}"),
                result5.getAggregateOperations().get(0));
    }

    @Test
    @DisplayName("Tests that all supported literal types are returned correctly.")
    void testLiteralTypes() throws SQLException {
        // Boolean literals
        final String query1 = String.format(
                "SELECT TRUE AS \"literalTrue\", "
                        + "FALSE AS \"literalFalse\", "
                        + "UNKNOWN AS \"literalUnknown\" "
                        + "FROM \"%s\".\"%s\"",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result1 = queryMapper.get(query1);
        Assertions.assertNotNull(result1);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result1.getCollectionName());
        Assertions.assertEquals(3, result1.getColumnMetaData().size());
        Assertions.assertEquals(1, result1.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"literalTrue\": {\"$literal\": true}, "
                                + "\"literalFalse\": {\"$literal\": false}, "
                                + "\"literalUnknown\": null, \"_id\": 0}}"),
                result1.getAggregateOperations().get(0));

        // Numeric literals
        final String query2 = String.format(
                "SELECT CAST(-128 AS TINYINT) AS \"literalTinyInt\", "
                        + "CAST(-32768 AS SMALLINT) AS \"literalSmallInt\", "
                        + "CAST(-2147483648 AS INT) AS \"literalInt\", "
                        + "CAST(-9223372036854775808 AS BIGINT) AS \"literalBigInt\", "
                        + "CAST(1234567890.45 AS DECIMAL(100, 2)) AS \"literalDecimal\", "
                        + "CAST(9876543210.45 AS NUMERIC(100, 2)) AS \"literalNumeric\", "
                        + "CAST(1234.56 AS FLOAT) AS \"literalFloat\", "
                        + "CAST(12345.678 AS REAL) AS \"literalReal\", "
                        + "CAST(12345.6789999999999 AS DOUBLE) AS \"literalDouble\""
                        + "FROM \"%s\".\"%s\"",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result2 = queryMapper.get(query2);
        Assertions.assertNotNull(result2);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result2.getCollectionName());
        Assertions.assertEquals(9, result2.getColumnMetaData().size());
        Assertions.assertEquals(1, result2.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"literalTinyInt\": {\"$literal\": {\"$numberInt\": \"-128\"}}, "
                                + "\"literalSmallInt\": {\"$literal\": {\"$numberInt\": \"-32768\"}}, "
                                + "\"literalInt\": {\"$literal\": {\"$numberInt\": \"-2147483648\"}}, "
                                + "\"literalBigInt\": {\"$literal\": {\"$numberLong\": \"-9223372036854775808\"}}, "
                                + "\"literalDecimal\": {\"$literal\": {\"$numberDecimal\": \"1234567890.45\"}}, "
                                + "\"literalNumeric\": {\"$literal\": {\"$numberDecimal\": \"9876543210.45\"}}, "
                                + "\"literalFloat\": {\"$literal\": {\"$numberDouble\": \"1234.56\"}}, "
                                + "\"literalReal\": {\"$literal\": {\"$numberDouble\": \"12345.678\"}}, "
                                + "\"literalDouble\": {\"$literal\": {\"$numberDouble\": \"12345.679\"}}, "
                                + "\"_id\": 0}}"),
                result2.getAggregateOperations().get(0));

        // Numeric literals as strings (note: REAL round-trips through float precision).
        final String query7 = String.format(
                "SELECT CAST(-128 AS TINYINT) AS \"literalTinyInt\", "
                        + "CAST('-32768' AS SMALLINT) AS \"literalSmallInt\", "
                        + "CAST('-2147483648' AS INT) AS \"literalInt\", "
                        + "CAST('-9223372036854775808' AS BIGINT) AS \"literalBigInt\", "
                        + "CAST('123456789012345678901234567890.45' AS DECIMAL(100, 2)) AS \"literalDecimal\", "
                        + "CAST('987654321098765432109876543210.45' AS NUMERIC(100, 2)) AS \"literalNumeric\", "
                        + "CAST('1234.56' AS FLOAT) AS \"literalFloat\", "
                        + "CAST('12345.678' AS REAL) AS \"literalReal\", "
                        + "CAST('12345.6789999999999' AS DOUBLE) AS \"literalDouble\""
                        + "FROM \"%s\".\"%s\"",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result7 = queryMapper.get(query7);
        Assertions.assertNotNull(result7);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result7.getCollectionName());
        Assertions.assertEquals(9, result7.getColumnMetaData().size());
        Assertions.assertEquals(1, result7.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"literalTinyInt\": {\"$literal\": {\"$numberInt\": \"-128\"}}, "
                                + "\"literalSmallInt\": {\"$literal\": {\"$numberInt\": \"-32768\"}}, "
                                + "\"literalInt\": {\"$literal\": {\"$numberInt\": \"-2147483648\"}}, "
                                + "\"literalBigInt\": {\"$literal\": {\"$numberLong\": \"-9223372036854775808\"}}, "
                                + "\"literalDecimal\": {\"$literal\": {\"$numberDecimal\": \"123456789012345678901234567890.45\"}}, "
                                + "\"literalNumeric\": {\"$literal\": {\"$numberDecimal\": \"987654321098765432109876543210.45\"}}, "
                                + "\"literalFloat\": {\"$literal\": {\"$numberDouble\": \"1234.56\"}}, "
                                + "\"literalReal\": {\"$literal\": {\"$numberDouble\": \"12345.677734375\"}}, "
                                + "\"literalDouble\": {\"$literal\": {\"$numberDouble\": \"12345.679\"}}, "
                                + "\"_id\": 0}}"),
                result7.getAggregateOperations().get(0));

        // String literals (CHAR(5) pads the empty string with spaces).
        final String query3 = String.format(
                "SELECT CAST('Hello' AS CHAR(5)) AS \"literalChar\", "
                        + "CAST('' AS CHAR(5)) AS \"literalCharEmpty\", "
                        + "CAST('Hello' AS VARCHAR) AS \"literalVarchar\", "
                        + "CAST('' AS VARCHAR) AS \"literalVarcharEmpty\" "
                        + "FROM \"%s\".\"%s\"",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result3 = queryMapper.get(query3);
        Assertions.assertNotNull(result3);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result3.getCollectionName());
        Assertions.assertEquals(4, result3.getColumnMetaData().size());
        Assertions.assertEquals(1, result3.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"literalChar\": {\"$literal\": \"Hello\"}, "
                                + "\"literalCharEmpty\": {\"$literal\": \"     \"}, "
                                + "\"literalVarchar\": {\"$literal\": \"Hello\"}, "
                                + "\"literalVarcharEmpty\": {\"$literal\": \"\"}, "
                                + "\"_id\": 0}}"),
                result3.getAggregateOperations().get(0));

        // Binary literals (BINARY(3) zero-pads; VARBINARY does not).
        final String query4 = String.format(
                "SELECT CAST(x'45F0AB' AS BINARY(3)) AS \"literalBinary\", "
                        + "CAST(x'' AS BINARY(3)) AS \"literalBinaryEmpty\", "
                        + "CAST(x'45F0AB' AS VARBINARY) AS \"literalVarbinary\", "
                        + "CAST(x'' AS VARBINARY) AS \"literalVarbinaryEmpty\" "
                        + "FROM \"%s\".\"%s\"",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result4 = queryMapper.get(query4);
        Assertions.assertNotNull(result4);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result4.getCollectionName());
        Assertions.assertEquals(4, result4.getColumnMetaData().size());
        Assertions.assertEquals(1, result4.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"literalBinary\": {\"$binary\": {\"base64\": \"RfCr\", \"subType\": \"00\"}}, "
                                + "\"literalBinaryEmpty\": {\"$binary\": {\"base64\": \"AAAA\", \"subType\": \"00\"}}, "
                                + "\"literalVarbinary\": {\"$binary\": {\"base64\": \"RfCr\", \"subType\": \"00\"}}, "
                                + "\"literalVarbinaryEmpty\": {\"$binary\": {\"base64\": \"\", \"subType\": \"00\"}}, "
                                + "\"_id\": 0}}"),
                result4.getAggregateOperations().get(0));

        // Date/time literals translate to epoch-millisecond $date values.
        final String query5 = String.format(
                "SELECT TIME '20:17:40' AS \"literalTime\", "
                        + "DATE '2017-09-20' AS \"literalDate\", "
                        + "TIMESTAMP '2017-09-20 20:17:40' AS \"literalTimestamp\""
                        + "FROM \"%s\".\"%s\"",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result5 = queryMapper.get(query5);
        Assertions.assertNotNull(result5);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result5.getCollectionName());
        Assertions.assertEquals(3, result5.getColumnMetaData().size());
        Assertions.assertEquals(1, result5.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"literalTime\": {\"$date\": {\"$numberLong\": \"73060000\" }}, "
                                + "\"literalDate\": {\"$date\": {\"$numberLong\": \"1505865600000\" }}, "
                                + "\"literalTimestamp\": {\"$date\": {\"$numberLong\": \"1505938660000\" }}, "
                                + "\"_id\": 0}}"),
                result5.getAggregateOperations().get(0));

        // Interval literals: year/month intervals become month counts; day/time
        // intervals become millisecond counts.
        final String query6 = String.format(
                "SELECT INTERVAL '123-2' YEAR(3) TO MONTH AS \"literalYearToMonth\", "
                        + "INTERVAL '123' YEAR(3) AS \"literalYear\", "
                        + "INTERVAL 300 MONTH(3) AS \"literalMonth\", "
                        + "INTERVAL '400' DAY(3) AS \"literalDay\", "
                        + "INTERVAL '400 5' DAY(3) TO HOUR AS \"literalDayToHour\", "
                        + "INTERVAL '4 5:12' DAY TO MINUTE AS \"literalDayToMinute\", "
                        + "INTERVAL '4 5:12:10.789' DAY TO SECOND AS \"literalDayToSecond\", "
                        + "INTERVAL '10' HOUR AS \"literalHour\", "
                        + "INTERVAL '11:20' HOUR TO MINUTE AS \"literalHourToMinute\", "
                        + "INTERVAL '11:20:10' HOUR TO SECOND AS \"literalHourToSecond\", "
                        + "INTERVAL '10' MINUTE AS \"literalMinute\", "
                        + "INTERVAL '10:22' MINUTE TO SECOND AS \"literalMinuteToSecond\", "
                        + "INTERVAL '30' SECOND AS \"literalSecond\""
                        + "FROM \"%s\".\"%s\"",
                getDatabaseName(), OBJECT_ID_COLLECTION_NAME);
        final DocumentDbMqlQueryContext result6 = queryMapper.get(query6);
        Assertions.assertNotNull(result6);
        Assertions.assertEquals(OBJECT_ID_COLLECTION_NAME, result6.getCollectionName());
        Assertions.assertEquals(13, result6.getColumnMetaData().size());
        Assertions.assertEquals(1, result6.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"literalYearToMonth\": {\"$literal\": {\"$numberLong\": \"1478\"}}, "
                                + "\"literalYear\": {\"$literal\": {\"$numberLong\": \"1476\"}}, "
                                + "\"literalMonth\": {'$multiply': [{\"$literal\": {\"$numberInt\": \"300\"}}, {\"$literal\": {\"$numberLong\": \"1\" }}]}, "
                                + "\"literalDay\": {\"$literal\": {\"$numberLong\": \"34560000000\"}}, "
                                + "\"literalDayToHour\": {\"$literal\": {\"$numberLong\": \"34578000000\"}}, "
                                + "\"literalDayToMinute\": {\"$literal\": {\"$numberLong\": \"364320000\"}}, "
                                + "\"literalDayToSecond\": {\"$literal\": {\"$numberLong\": \"364330789\"}}, "
                                + "\"literalHour\": {\"$literal\": {\"$numberLong\": \"36000000\"}}, "
                                + "\"literalHourToMinute\": {\"$literal\": {\"$numberLong\": \"40800000\"}}, "
                                + "\"literalHourToSecond\": {\"$literal\": {\"$numberLong\": \"40810000\"}}, "
                                + "\"literalMinute\": {\"$literal\": {\"$numberLong\": \"600000\"}}, "
                                + "\"literalMinuteToSecond\": {\"$literal\": {\"$numberLong\": \"622000\"}}, "
                                + "\"literalSecond\": {\"$literal\": {\"$numberLong\": \"30000\"}}, "
                                + "_id: 0}}"),
                result6.getAggregateOperations().get(0));
    }
}
4,533
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query/DocumentDbQueryMappingServiceStringTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.query; import org.bson.BsonDocument; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension; import java.sql.SQLException; @ExtendWith(DocumentDbFlapDoodleExtension.class) public class DocumentDbQueryMappingServiceStringTest extends DocumentDbQueryMappingServiceTest { private static final String COLLECTION_NAME = "testCollection"; private static DocumentDbQueryMappingService queryMapper; @BeforeAll void initialize() throws SQLException { final BsonDocument document = BsonDocument.parse("{ \"_id\" : \"key\", \"field\": \"Hello, world!\"}"); insertBsonDocuments(COLLECTION_NAME, new BsonDocument[] {document}); queryMapper = getQueryMappingService(); } @Test @DisplayName("Test queries with UPPER() and fn-escaped UCASE().") void testQueryWithUpper() throws SQLException { final String query1 = String.format( "SELECT UPPER(\"field\") FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result1 = queryMapper.get(query1); Assertions.assertNotNull(result1); Assertions.assertEquals(COLLECTION_NAME, result1.getCollectionName()); Assertions.assertEquals(1, result1.getColumnMetaData().size()); 
Assertions.assertEquals(1, result1.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$gt\": [\"$field\", null]}]}, {\"$toUpper\": \"$field\"}, null]}, \"_id\": 0}}"), result1.getAggregateOperations().get(0)); final String query2 = String.format( "SELECT {fn UCASE(\"field\")} FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result2 = queryMapper.get(query2); Assertions.assertNotNull(result2); Assertions.assertEquals(COLLECTION_NAME, result2.getCollectionName()); Assertions.assertEquals(1, result2.getColumnMetaData().size()); Assertions.assertEquals(1, result2.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$gt\": [\"$field\", null]}]}, {\"$toUpper\": \"$field\"}, null]}, \"_id\": 0}}"), result2.getAggregateOperations().get(0)); } @Test @DisplayName("Test queries with LOWER() and fn-escaped LCASE().") void testQueryWithLower() throws SQLException { final String query1 = String.format( "SELECT LOWER(\"field\") FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result1 = queryMapper.get(query1); Assertions.assertNotNull(result1); Assertions.assertEquals(COLLECTION_NAME, result1.getCollectionName()); Assertions.assertEquals(1, result1.getColumnMetaData().size()); Assertions.assertEquals(1, result1.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$gt\": [\"$field\", null]}]}, {\"$toLower\": \"$field\"}, null]}, \"_id\": 0}}"), result1.getAggregateOperations().get(0)); final String query2 = String.format( "SELECT {fn LCASE(\"field\")} FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result2 = queryMapper.get(query2); Assertions.assertNotNull(result2); Assertions.assertEquals(COLLECTION_NAME, 
result2.getCollectionName()); Assertions.assertEquals(1, result2.getColumnMetaData().size()); Assertions.assertEquals(1, result2.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$gt\": [\"$field\", null]}]}, {\"$toLower\": \"$field\"}, null]}, \"_id\": 0}}"), result2.getAggregateOperations().get(0)); } @Test @DisplayName("Test queries with CHAR_LENGTH(), CHARACTER_LENGTH() & fn-escaped LENGTH().") void testQueryWithCharLength() throws SQLException { final String query1 = String.format( "SELECT CHAR_LENGTH(\"field\") FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result1 = queryMapper.get(query1); Assertions.assertNotNull(result1); Assertions.assertEquals(COLLECTION_NAME, result1.getCollectionName()); Assertions.assertEquals(1, result1.getColumnMetaData().size()); Assertions.assertEquals(1, result1.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$gt\": [\"$field\", null]}]}, {\"$strLenCP\": \"$field\"}, null]}, \"_id\": 0}}"), result1.getAggregateOperations().get(0)); final String query2 = String.format( "SELECT CHARACTER_LENGTH(\"field\") FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result2 = queryMapper.get(query2); Assertions.assertNotNull(result2); Assertions.assertEquals(COLLECTION_NAME, result2.getCollectionName()); Assertions.assertEquals(1, result2.getColumnMetaData().size()); Assertions.assertEquals(1, result2.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$gt\": [\"$field\", null]}]}, {\"$strLenCP\": \"$field\"}, null]}, \"_id\": 0}}"), result2.getAggregateOperations().get(0)); final String query3 = String.format( "SELECT {fn LENGTH(\"field\")} FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final 
DocumentDbMqlQueryContext result3 = queryMapper.get(query3); Assertions.assertNotNull(result3); Assertions.assertEquals(COLLECTION_NAME, result3.getCollectionName()); Assertions.assertEquals(1, result3.getColumnMetaData().size()); Assertions.assertEquals(1, result3.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$gt\": [\"$field\", null]}]}, {\"$strLenCP\": \"$field\"}, null]}, \"_id\": 0}}"), result3.getAggregateOperations().get(0)); } @Test @DisplayName("Test queries with string concatenation using ||, CONCAT() and fn-escaped CONCAT().") void testQueryWithStringConcatenation() throws SQLException { final String query1 = String.format( "SELECT CONCAT(\"field\", '!!!') FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result1 = queryMapper.get(query1); Assertions.assertNotNull(result1); Assertions.assertEquals(COLLECTION_NAME, result1.getCollectionName()); Assertions.assertEquals(1, result1.getColumnMetaData().size()); Assertions.assertEquals(1, result1.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$concat\": [{\"$ifNull\": [\"$field\", \"\"]}, {\"$ifNull\": [{\"$literal\": \"!!!\"}, \"\"]}]}, \"_id\": 0}}"), result1.getAggregateOperations().get(0)); final String query2 = String.format( "SELECT \"field\" || '!!!' 
FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result2 = queryMapper.get(query2); Assertions.assertNotNull(result2); Assertions.assertEquals(COLLECTION_NAME, result2.getCollectionName()); Assertions.assertEquals(1, result2.getColumnMetaData().size()); Assertions.assertEquals(1, result2.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$concat\": [\"$field\", {\"$literal\": \"!!!\"}]}, \"_id\": 0}}"), result2.getAggregateOperations().get(0)); final String query3 = String.format( "SELECT {fn CONCAT(\"field\", '!!!')} FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result3 = queryMapper.get(query3); Assertions.assertNotNull(result3); Assertions.assertEquals(COLLECTION_NAME, result3.getCollectionName()); Assertions.assertEquals(1, result3.getColumnMetaData().size()); Assertions.assertEquals(1, result3.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$concat\": [\"$field\", {\"$literal\": \"!!!\"}]}, \"_id\": 0}}"), result3.getAggregateOperations().get(0)); } @Test @DisplayName("Test queries with POSITION() & fn-escaped LOCATE() with 2 and 3 arguments") void testQueryWithPosition() throws SQLException { final String query1 = String.format( "SELECT POSITION('world' IN \"field\") FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result1 = queryMapper.get(query1); Assertions.assertNotNull(result1); Assertions.assertEquals(COLLECTION_NAME, result1.getCollectionName()); Assertions.assertEquals(1, result1.getColumnMetaData().size()); Assertions.assertEquals(1, result1.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$literal\": \"world\"}, null]}, {\"$gt\": [\"$field\", null]}]}, {\"$add\": [{\"$indexOfCP\": [{\"$toLower\": \"$field\"}, 
{\"$toLower\": {\"$literal\": \"world\"}}]}, 1]}, null]}, \"_id\": 0}}"), result1.getAggregateOperations().get(0)); final String query2 = String.format( "SELECT POSITION('world' IN \"field\" FROM 3) FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result2 = queryMapper.get(query2); Assertions.assertNotNull(result2); Assertions.assertEquals(COLLECTION_NAME, result2.getCollectionName()); Assertions.assertEquals(1, result2.getColumnMetaData().size()); Assertions.assertEquals(1, result2.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$literal\": \"world\"}, null]}, {\"$gt\": [\"$field\", null]}, {\"$gt\": [{\"$literal\": 3}, null]}]}, {\"$cond\": [{\"$lte\": [{\"$literal\": 3}, 0]}, 0, {\"$add\": [{\"$indexOfCP\": [{\"$toLower\": \"$field\"}, {\"$toLower\": {\"$literal\": \"world\"}}, {\"$subtract\": [{\"$literal\": 3}, 1]}]}, 1]}]}, null]}, \"_id\": 0}}"), result2.getAggregateOperations().get(0)); final String query3 = String.format( "SELECT {fn LOCATE('world', \"field\", 3)} FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result3 = queryMapper.get(query3); Assertions.assertNotNull(result3); Assertions.assertEquals(COLLECTION_NAME, result3.getCollectionName()); Assertions.assertEquals(1, result3.getColumnMetaData().size()); Assertions.assertEquals(1, result3.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$literal\": \"world\"}, null]}, {\"$gt\": [\"$field\", null]}, {\"$gt\": [{\"$literal\": 3}, null]}]}, {\"$cond\": [{\"$lte\": [{\"$literal\": 3}, 0]}, 0, {\"$add\": [{\"$indexOfCP\": [{\"$toLower\": \"$field\"}, {\"$toLower\": {\"$literal\": \"world\"}}, {\"$subtract\": [{\"$literal\": 3}, 1]}]}, 1]}]}, null]}, \"_id\": 0}}"), result3.getAggregateOperations().get(0)); } @Test 
@DisplayName("Test queries with LEFT() and fn-escaped LEFT().")
    void testQueryWithLeft() throws SQLException {
        // LEFT(field, n) maps to $substrCP from offset 0, guarded by null checks and n >= 0.
        final String query1 = String.format(
                "SELECT LEFT(\"field\", 5) FROM \"%s\".\"%s\"",
                getDatabaseName(), COLLECTION_NAME);
        final DocumentDbMqlQueryContext result1 = queryMapper.get(query1);
        Assertions.assertNotNull(result1);
        Assertions.assertEquals(COLLECTION_NAME, result1.getCollectionName());
        Assertions.assertEquals(1, result1.getColumnMetaData().size());
        Assertions.assertEquals(1, result1.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$and\": [{\"$gt\": [\"$field\", null]}, {\"$gt\": [{\"$literal\": 5}, null]}]}, {\"$gte\": [{\"$literal\": 5}, 0]}]}, {\"$substrCP\": [\"$field\", 0, {\"$literal\": 5}]}, null]}, \"_id\": 0}}"),
                result1.getAggregateOperations().get(0));
        // The JDBC escape {fn LEFT(...)} must produce the identical pipeline.
        final String query2 = String.format(
                "SELECT {fn LEFT(\"field\", 5)} FROM \"%s\".\"%s\"",
                getDatabaseName(), COLLECTION_NAME);
        final DocumentDbMqlQueryContext result2 = queryMapper.get(query2);
        Assertions.assertNotNull(result2);
        Assertions.assertEquals(COLLECTION_NAME, result2.getCollectionName());
        Assertions.assertEquals(1, result2.getColumnMetaData().size());
        Assertions.assertEquals(1, result2.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$and\": [{\"$gt\": [\"$field\", null]}, {\"$gt\": [{\"$literal\": 5}, null]}]}, {\"$gte\": [{\"$literal\": 5}, 0]}]}, {\"$substrCP\": [\"$field\", 0, {\"$literal\": 5}]}, null]}, \"_id\": 0}}"),
                // FIX: previously asserted against result1 (copy/paste), so the
                // {fn LEFT} translation was never actually verified.
                result2.getAggregateOperations().get(0));
    }

    @Test
    @DisplayName("Test queries with RIGHT() and fn-escaped RIGHT().")
    void testQueryWithRight() throws SQLException {
        // RIGHT(field, n) returns the whole string when it is shorter than n,
        // otherwise a $substrCP of the trailing n code points.
        final String query1 = String.format(
                "SELECT RIGHT(\"field\", 5) FROM \"%s\".\"%s\"",
                getDatabaseName(), COLLECTION_NAME);
        final DocumentDbMqlQueryContext result1 = queryMapper.get(query1);
        Assertions.assertNotNull(result1);
        Assertions.assertEquals(COLLECTION_NAME, result1.getCollectionName());
        Assertions.assertEquals(1, result1.getColumnMetaData().size());
        Assertions.assertEquals(1, result1.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$and\": [{\"$gt\": [\"$field\", null]}, {\"$gt\": [{\"$literal\": 5}, null]}]}, {\"$gte\": [{\"$literal\": 5}, 0]}]}, {\"$cond\": [{\"$lte\": [{\"$strLenCP\": \"$field\"}, {\"$literal\": 5}]}, \"$field\", {\"$substrCP\": [\"$field\", {\"$subtract\": [{\"$strLenCP\": \"$field\"}, {\"$literal\": 5}]}, {\"$literal\": 5}]}]}, null]}, \"_id\": 0}}"),
                result1.getAggregateOperations().get(0));
        // The JDBC escape {fn RIGHT(...)} must produce the identical pipeline.
        final String query2 = String.format(
                "SELECT {fn RIGHT(\"field\", 5)} FROM \"%s\".\"%s\"",
                getDatabaseName(), COLLECTION_NAME);
        final DocumentDbMqlQueryContext result2 = queryMapper.get(query2);
        Assertions.assertNotNull(result2);
        Assertions.assertEquals(COLLECTION_NAME, result2.getCollectionName());
        Assertions.assertEquals(1, result2.getColumnMetaData().size());
        Assertions.assertEquals(1, result2.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$and\": [{\"$gt\": [\"$field\", null]}, {\"$gt\": [{\"$literal\": 5}, null]}]}, {\"$gte\": [{\"$literal\": 5}, 0]}]}, {\"$cond\": [{\"$lte\": [{\"$strLenCP\": \"$field\"}, {\"$literal\": 5}]}, \"$field\", {\"$substrCP\": [\"$field\", {\"$subtract\": [{\"$strLenCP\": \"$field\"}, {\"$literal\": 5}]}, {\"$literal\": 5}]}]}, null]}, \"_id\": 0}}"),
                // FIX: previously asserted against result1 (copy/paste), so the
                // {fn RIGHT} translation was never actually verified.
                result2.getAggregateOperations().get(0));
    }
}
4,534
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query/DocumentDbQueryMappingServiceDateTimeTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.query; import org.bson.BsonDateTime; import org.bson.BsonDocument; import org.bson.conversions.Bson; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbFilter; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension; import java.sql.SQLException; import java.time.Instant; import java.util.List; /* Verifies SQL-to-MQL translation of date/time functions against a FlapDoodle-backed test collection. */ @ExtendWith(DocumentDbFlapDoodleExtension.class) public class DocumentDbQueryMappingServiceDateTimeTest extends DocumentDbQueryMappingServiceTest { private static final String DATE_COLLECTION_NAME = "dateTestCollection"; private static DocumentDbQueryMappingService queryMapper; /* Seeds a single document {_id: 101, field: 2020-01-01T00:00:00Z} so the mapped schema exposes a DATETIME column for the tests below. */ @BeforeAll void initialize() throws SQLException { final long dateTime = Instant.parse("2020-01-01T00:00:00.00Z").toEpochMilli(); final BsonDocument document = BsonDocument.parse("{\"_id\": 101}"); document.append("field", new BsonDateTime(dateTime)); insertBsonDocuments(DATE_COLLECTION_NAME, new BsonDocument[]{document}); queryMapper = getQueryMappingService(); } /** * Tests TIMESTAMPADD() and EXTRACT(). * @throws SQLException occurs if query fails. 
*/ @Test @DisplayName("Tests TIMESTAMPADD() and EXTRACT().") void testDateFunctions() throws SQLException { final String timestampAddQuery = String.format( "SELECT " + "TIMESTAMPADD(WEEK, 1, \"field\"), " + "TIMESTAMPADD(DAY, 2, \"field\"), " + "TIMESTAMPADD(HOUR, 3, \"field\"), " + "TIMESTAMPADD(MINUTE, 4, \"field\"), " + "TIMESTAMPADD(SECOND, 5, \"field\"), " + "TIMESTAMPADD(MICROSECOND, 6, \"field\") " + "FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); DocumentDbMqlQueryContext result = queryMapper.get(timestampAddQuery); Assertions.assertNotNull(result); Assertions.assertEquals(DATE_COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(6, result.getColumnMetaData().size()); Assertions.assertEquals(1, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"EXPR$0\": {\"$add\": [\"$field\", {\"$multiply\": [{\"$literal\": 604800000}, {\"$literal\": 1}]}]}, " + "\"EXPR$1\": {\"$add\": [\"$field\", {\"$multiply\": [{\"$literal\": 86400000}, {\"$literal\": 2}]}]}, " + "\"EXPR$2\": {\"$add\": [\"$field\", {\"$multiply\": [{\"$literal\": 3600000}, {\"$literal\": 3}]}]}, " + "\"EXPR$3\": {\"$add\": [\"$field\", {\"$multiply\": [{\"$literal\": 60000}, {\"$literal\": 4}]}]}, " + "\"EXPR$4\": {\"$add\": [\"$field\", {\"$multiply\": [{\"$literal\": 1000}, {\"$literal\": 5}]}]}, " + "\"EXPR$5\": {\"$add\": [\"$field\", {\"$divide\": [{\"$subtract\": [{\"$multiply\": [{\"$literal\": 1}, {\"$literal\": 6}]}, {\"$mod\": [{\"$multiply\": [{\"$literal\": 1}, {\"$literal\": 6}]}, {\"$literal\": 1000}]}]}, {\"$literal\": 1000}]}]}, " + "\"_id\": 0}}").toJson(), ((BsonDocument) result.getAggregateOperations().get(0)).toJson()); final String extractQuery = String.format( "SELECT " + "YEAR(\"field\"), " + "MONTH(\"field\")," + "WEEK(\"field\")," + "DAYOFMONTH(\"field\")," + "DAYOFWEEK(\"field\")," + "DAYOFYEAR(\"field\")," + "HOUR(\"field\")," + "MINUTE(\"field\")," + "SECOND(\"field\")," + 
"QUARTER(\"field\")" + "FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); result = queryMapper.get(extractQuery); Assertions.assertNotNull(result); Assertions.assertEquals(DATE_COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(10, result.getColumnMetaData().size()); Assertions.assertEquals(1, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\":" + " {\"EXPR$0\": {\"$year\": \"$field\"}," + " \"EXPR$1\": {\"$month\": \"$field\"}," + " \"EXPR$2\": {\"$week\": \"$field\"}," + " \"EXPR$3\": {\"$dayOfMonth\": \"$field\"}," + " \"EXPR$4\": {\"$dayOfWeek\": \"$field\"}," + " \"EXPR$5\": {\"$dayOfYear\": \"$field\"}," + " \"EXPR$6\": {\"$hour\": \"$field\"}," + " \"EXPR$7\": {\"$minute\": \"$field\"}," + " \"EXPR$8\": {\"$second\": \"$field\"}," + " \"EXPR$9\":" + " {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 3]}, 1," + " {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 6]}, 2," + " {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 9]}, 3," + " {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 12]}, 4," + " null]}]}]}]}, " + "\"_id\": 0}}"), result.getAggregateOperations().get(0)); final String timestampDiffQuery = String.format( "SELECT " + "TIMESTAMPDIFF(WEEK, \"field\", \"field\"), " + "TIMESTAMPDIFF(DAY, \"field\", \"field\"), " + "TIMESTAMPDIFF(HOUR, \"field\", \"field\"), " + "TIMESTAMPDIFF(MINUTE, \"field\", \"field\"), " + "TIMESTAMPDIFF(SECOND, \"field\", \"field\"), " + "TIMESTAMPDIFF(MICROSECOND, \"field\", \"field\"), " + "TIMESTAMPDIFF(YEAR, \"field\", \"field\"), " + "TIMESTAMPDIFF(QUARTER, \"field\", \"field\"), " + "TIMESTAMPDIFF(MONTH, \"field\", \"field\")" + "FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); result = queryMapper.get(timestampDiffQuery); Assertions.assertNotNull(result); Assertions.assertEquals(DATE_COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(9, result.getColumnMetaData().size()); Assertions.assertEquals(1, 
result.getAggregateOperations().size()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\":" + " {\"EXPR$0\": {\"$divide\": [{\"$subtract\": [{\"$divide\": [{\"$subtract\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$mod\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$literal\": 1000}]}]}, {\"$literal\": 1000}]}, {\"$mod\": [{\"$divide\": [{\"$subtract\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$mod\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$literal\": 1000}]}]}, {\"$literal\": 1000}]}, {\"$literal\": 604800}]}]}, {\"$literal\": 604800}]}," + " \"EXPR$1\": {\"$divide\": [{\"$subtract\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$mod\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$literal\": 86400000}]}]}, {\"$literal\": 86400000}]}," + " \"EXPR$2\": {\"$divide\": [{\"$subtract\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$mod\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$literal\": 3600000}]}]}, {\"$literal\": 3600000}]}," + " \"EXPR$3\": {\"$divide\": [{\"$subtract\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$mod\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$literal\": 60000}]}]}, {\"$literal\": 60000}]}," + " \"EXPR$4\": {\"$divide\": [{\"$subtract\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$mod\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$literal\": 1000}]}]}, {\"$literal\": 1000}]}," + " \"EXPR$5\": {\"$multiply\": [{\"$divide\": [{\"$subtract\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$mod\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$literal\": 1000}]}]}, {\"$literal\": 1000}]}, {\"$literal\": 1000000}]}," + " \"EXPR$6\": {\"$subtract\": [{\"$year\": \"$field\"}, {\"$year\": \"$field\"}]}," + " \"EXPR$7\": {\"$subtract\": [" + " {\"$add\": [{\"$multiply\": [4, {\"$year\": \"$field\"}]}, {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 3]}, 1, {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 6]}, 2, {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 9]}, 3, {\"$cond\": 
[{\"$lte\": [{\"$month\": \"$field\"}, 12]}, 4, null]}]}]}]}]}," + " {\"$add\": [{\"$multiply\": [4, {\"$year\": \"$field\"}]}, {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 3]}, 1, {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 6]}, 2, {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 9]}, 3, {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 12]}, 4, null]}]}]}]}]}]}," + " \"EXPR$8\": {\"$subtract\": [{\"$add\": [{\"$multiply\": [12, {\"$year\": \"$field\"}]}, {\"$month\": \"$field\"}]}, {\"$add\": [{\"$multiply\": [12, {\"$year\": \"$field\"}]}, {\"$month\": \"$field\"}]}]}, " + " \"_id\": 0}}"), result.getAggregateOperations().get(0)); } /** * Tests CURRENT_TIMESTAMP, CURRENT_DATE, and CURRENT_TIME. * @throws SQLException occurs if query fails. */ @Test @DisplayName("Tests CURRENT_TIMESTAMP, CURRENT_DATE, and CURRENT_TIME.") void testCurrentTimestampFunctions() throws SQLException { final String currentTimestampQuery = String.format( "SELECT CURRENT_TIMESTAMP AS \"cts\"" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext result1 = queryMapper.get(currentTimestampQuery); Assertions.assertNotNull(result1); Assertions.assertEquals(DATE_COLLECTION_NAME, result1.getCollectionName()); Assertions.assertEquals(1, result1.getColumnMetaData().size()); Assertions.assertEquals(1, result1.getAggregateOperations().size()); BsonDocument rootDoc = result1.getAggregateOperations() .get(0).toBsonDocument(BsonDocument.class, null); Assertions.assertNotNull(rootDoc); BsonDocument addFieldsDoc = rootDoc.getDocument("$project"); Assertions.assertNotNull(addFieldsDoc); BsonDateTime cstDateTime = addFieldsDoc.getDateTime("cts"); Assertions.assertNotNull(cstDateTime); BsonDocument expectedDoc = BsonDocument.parse( "{\"$project\": " + "{\"cts\": " + "{\"$date\": " + "{\"$numberLong\": " + "\"" + cstDateTime.getValue() + "\"" + "}}, \"_id\": 0}}"); Assertions.assertEquals( expectedDoc, result1.getAggregateOperations().get(0)); 
final String currentDateQuery = String.format( "SELECT CURRENT_DATE AS \"cts\"" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext result2 = queryMapper.get(currentDateQuery); Assertions.assertNotNull(result2); Assertions.assertEquals(DATE_COLLECTION_NAME, result2.getCollectionName()); Assertions.assertEquals(1, result2.getColumnMetaData().size()); Assertions.assertEquals(1, result2.getAggregateOperations().size()); rootDoc = result2.getAggregateOperations() .get(0).toBsonDocument(BsonDocument.class, null); Assertions.assertNotNull(rootDoc); addFieldsDoc = rootDoc.getDocument("$project"); Assertions.assertNotNull(addFieldsDoc); cstDateTime = addFieldsDoc.getDateTime("cts"); Assertions.assertNotNull(cstDateTime); expectedDoc = BsonDocument.parse( "{\"$project\": " + "{\"cts\": " + "{\"$date\": " + "{\"$numberLong\": " + "\"" + cstDateTime.getValue() + "\"" + "}}, \"_id\": 0}}"); Assertions.assertEquals( expectedDoc, result2.getAggregateOperations().get(0)); final String currentTimeQuery = String.format( "SELECT CURRENT_TIME AS \"cts\"" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext result3 = queryMapper.get(currentTimeQuery); Assertions.assertNotNull(result3); Assertions.assertEquals(DATE_COLLECTION_NAME, result3.getCollectionName()); Assertions.assertEquals(1, result3.getColumnMetaData().size()); Assertions.assertEquals(1, result3.getAggregateOperations().size()); rootDoc = result3.getAggregateOperations() .get(0).toBsonDocument(BsonDocument.class, null); Assertions.assertNotNull(rootDoc); addFieldsDoc = rootDoc.getDocument("$project"); Assertions.assertNotNull(addFieldsDoc); cstDateTime = addFieldsDoc.getDateTime("cts"); Assertions.assertNotNull(cstDateTime); expectedDoc = BsonDocument.parse( "{\"$project\": " + "{\"cts\": " + "{\"$date\": " + "{\"$numberLong\": " + "\"" + cstDateTime.getValue() + "\"" + "}}, \"_id\": 0}}"); Assertions.assertEquals( expectedDoc, 
result3.getAggregateOperations().get(0)); } /** * Tests TIMESTAMPADD for MONTH, YEAR or QUARTER. */ @Test @DisplayName("Tests TIMESTAMPADD for MONTH, YEAR or QUARTER.") void testTimestampAddMonthYearFunction() { final String timestampAddQuery8 = String.format( "SELECT TIMESTAMPADD(MONTH, 10, \"field\") AS \"cts\"" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); Assertions.assertEquals(String.format("Unable to parse SQL" + " 'SELECT TIMESTAMPADD(MONTH, 10, \"field\") AS \"cts\" FROM \"database\".\"dateTestCollection\"'. --" + " Reason: 'Conversion between the source type (INTERVAL_MONTH) and the target type (TIMESTAMP) is not supported.'"), Assertions.assertThrows(SQLException.class, () -> queryMapper.get(timestampAddQuery8)) .getMessage()); final String timestampAddQuery9 = String.format( "SELECT TIMESTAMPADD(YEAR, 10, \"field\") AS \"cts\"" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); Assertions.assertEquals(String.format("Unable to parse SQL" + " 'SELECT TIMESTAMPADD(YEAR, 10, \"field\") AS \"cts\" FROM \"database\".\"dateTestCollection\"'. --" + " Reason: 'Conversion between the source type (INTERVAL_YEAR) and the target type (TIMESTAMP) is not supported.'"), Assertions.assertThrows(SQLException.class, () -> queryMapper.get(timestampAddQuery9)) .getMessage()); final String timestampAddQuery10 = String.format( "SELECT TIMESTAMPADD(QUARTER, 10, \"field\") AS \"cts\"" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); Assertions.assertEquals(String.format("Unable to parse SQL" + " 'SELECT TIMESTAMPADD(QUARTER, 10, \"field\") AS \"cts\" FROM \"database\".\"dateTestCollection\"'. --" + " Reason: 'Conversion between the source type (INTERVAL_MONTH) and the target type (TIMESTAMP) is not supported.'"), Assertions.assertThrows(SQLException.class, () -> queryMapper.get(timestampAddQuery10)) .getMessage()); } /** * Tests DAYNAME. * @throws SQLException occurs if query fails. 
*/ @Test @DisplayName("Tests DAYNAME.") void testDayName() throws SQLException { final String dayNameQuery = String.format( "SELECT DAYNAME(\"field\") AS \"cts\"" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(dayNameQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(1, operations.size()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"cts\":" + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 1]}, \"Sunday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 2]}, \"Monday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 3]}, \"Tuesday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 4]}, \"Wednesday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 5]}, \"Thursday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 6]}, \"Friday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 7]}, \"Saturday\"," + " null]}]}]}]}]}]}]}, " + " \"_id\": 0}}"), operations.get(0)); final String dayNameQuery2 = String.format( "SELECT DAYNAME(NULL) AS \"cts\"" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context2 = queryMapper.get(dayNameQuery2); Assertions.assertNotNull(context2); final List<Bson> operations2 = context2.getAggregateOperations(); Assertions.assertEquals(1, operations2.size()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"cts\":" + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": null}, 1]}, \"Sunday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": null}, 2]}, \"Monday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": null}, 3]}, \"Tuesday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": null}, 4]}, \"Wednesday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": null}, 5]}, \"Thursday\"," + " {\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": null}, 6]}, \"Friday\"," + " 
{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": null}, 7]}, \"Saturday\"," + " null]}]}]}]}]}]}]}, " + " \"_id\": 0}}"), operations2.get(0)); } /** * Tests MONTHNAME. * @throws SQLException occurs if query fails. */ @Test @DisplayName("Tests MONTHNAME.") void testMonthName() throws SQLException { final String dayNameQuery = String.format( "SELECT MONTHNAME(\"field\") AS \"cts\"" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(dayNameQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(1, operations.size()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"cts\":" + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 1]}, \"January\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 2]}, \"February\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 3]}, \"March\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 4]}, \"April\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 5]}, \"May\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 6]}, \"June\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 7]}, \"July\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 8]}, \"August\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 9]}, \"September\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 10]}, \"October\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 11]}, \"November\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 12]}, \"December\"," + " null]}]}]}]}]}]}]}]}]}]}]}]}, " + " \"_id\": 0}}"), operations.get(0)); final String dayNameQuery2 = String.format( "SELECT MONTHNAME(NULL) AS \"cts\"" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context2 = queryMapper.get(dayNameQuery2); Assertions.assertNotNull(context2); final List<Bson> operations2 = 
context2.getAggregateOperations(); Assertions.assertEquals(1, operations2.size()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"cts\":" + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 1]}, \"January\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 2]}, \"February\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 3]}, \"March\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 4]}, \"April\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 5]}, \"May\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 6]}, \"June\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 7]}, \"July\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 8]}, \"August\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 9]}, \"September\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 10]}, \"October\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 11]}, \"November\"," + " {\"$cond\": [{\"$eq\": [{\"$month\": null}, 12]}, \"December\"," + " null]}]}]}]}]}]}]}]}]}]}]}]}, " + " \"_id\": 0 }}"), operations2.get(0)); } @Test @DisplayName("Tests FLOOR(ts TO <x>).") void testFloorForDate() throws SQLException { final String floorDayQuery = String.format( "SELECT" + " FLOOR(\"field\" TO YEAR)," + " FLOOR(\"field\" TO MONTH)," + " FLOOR(\"field\" TO DAY)," + " FLOOR(\"field\" TO HOUR)," + " FLOOR(\"field\" TO MINUTE)," + " FLOOR(\"field\" TO SECOND)," + " FLOOR(\"field\" TO MILLISECOND)" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(floorDayQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(1, operations.size()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\":" + " {\"EXPR$0\": {\"$dateFromString\": {\"dateString\":" + " {\"$dateToString\": {\"date\": \"$field\", \"format\": \"%Y-01-01T00:00:00Z\"}}}}," + " \"EXPR$1\": {\"$dateFromString\": {\"dateString\":" + " {\"$dateToString\": {\"date\": \"$field\", 
\"format\": \"%Y-%m-01T00:00:00Z\"}}}}," + " \"EXPR$2\": {\"$add\": [{\"$date\": \"1970-01-01T00:00:00Z\"}," + " {\"$multiply\": [86400000, {\"$divide\": [{\"$subtract\":" + " [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-01T00:00:00Z\"}]}," + " {\"$mod\": [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-01T00:00:00Z\"}]}," + " 86400000]}]}, 86400000]}]}]}," + " \"EXPR$3\": {\"$add\": [{\"$date\": \"1970-01-01T00:00:00Z\"}," + " {\"$multiply\": [3600000, {\"$divide\": [{\"$subtract\":" + " [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-01T00:00:00Z\"}]}," + " {\"$mod\": [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-01T00:00:00Z\"}]}," + " 3600000]}]}, 3600000]}]}]}," + " \"EXPR$4\": {\"$add\": [{\"$date\": \"1970-01-01T00:00:00Z\"}," + " {\"$multiply\": [60000, {\"$divide\": [{\"$subtract\":" + " [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-01T00:00:00Z\"}]}," + " {\"$mod\": [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-01T00:00:00Z\"}]}," + " 60000]}]}, 60000]}]}]}," + " \"EXPR$5\": {\"$add\": [{\"$date\": \"1970-01-01T00:00:00Z\"}," + " {\"$multiply\": [1000, {\"$divide\": [{\"$subtract\":" + " [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-01T00:00:00Z\"}]}," + " {\"$mod\": [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-01T00:00:00Z\"}]}," + " 1000]}]}, 1000]}]}]}," + " \"EXPR$6\": {\"$add\": [{\"$date\": \"1970-01-01T00:00:00Z\"}," + " {\"$multiply\": [1, {\"$divide\": [{\"$subtract\":" + " [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-01T00:00:00Z\"}]}," + " {\"$mod\": [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-01T00:00:00Z\"}]}," + " 1]}]}, 1]}]}]}, " + " \"_id\": 0}}").toJson(), ((BsonDocument) operations.get(0)).toJson()); final String floorDayQuery1 = String.format( "SELECT" + " FLOOR(\"field\" TO WEEK)" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context1 = queryMapper.get(floorDayQuery1); Assertions.assertNotNull(context1); final List<Bson> 
operations1 = context1.getAggregateOperations(); Assertions.assertEquals(1, operations1.size()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\":" + " {\"EXPR$0\": " + " {\"$add\": [" + " {\"$date\": \"1970-01-05T00:00:00Z\"}, " + " {\"$multiply\": [604800000, " + " {\"$divide\": [" + " {\"$subtract\": [" + " {\"$subtract\": [\"$field\", {\"$date\": \"1970-01-05T00:00:00Z\"}]}, " + " {\"$mod\": [{\"$subtract\": [\"$field\", {\"$date\": \"1970-01-05T00:00:00Z\"}]}, 604800000]}]}, 604800000]}]}]}, " + " \"_id\": 0}}").toJson(), ((BsonDocument) operations1.get(0)).toJson()); final String floorDayQuery2 = String.format( "SELECT TIMESTAMPADD(WEEK,%n" + " TIMESTAMPDIFF(WEEK,%n" + " TIMESTAMP '1970-01-05', \"field\"),%n" + " TIMESTAMP '1970-01-05')%n" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context2 = queryMapper.get(floorDayQuery2); Assertions.assertNotNull(context2); final List<Bson> operations2 = context2.getAggregateOperations(); Assertions.assertEquals(1, operations2.size()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\":" + " {\"EXPR$0\":" + " {\"$add\": [" + " {\"$date\": \"1970-01-05T00:00:00Z\"}, " + " {\"$multiply\": [" + " {\"$literal\": {\"$numberLong\": \"604800000\"}}, " + " {\"$divide\": [" + " {\"$subtract\": [" + " {\"$divide\": [" + " {\"$subtract\": [" + " {\"$subtract\": [" + " \"$field\", " + " {\"$date\": \"1970-01-05T00:00:00Z\"}]" + " }, " + " {\"$mod\": [" + " {\"$subtract\": [" + " \"$field\", " + " {\"$date\": \"1970-01-05T00:00:00Z\"}]" + " }, " + " {\"$literal\": 1000}]" + " }]" + " }, " + " {\"$literal\": 1000}]" + " }, " + " {\"$mod\": [" + " {\"$divide\": [" + " {\"$subtract\": [" + " {\"$subtract\": [" + " \"$field\", " + " {\"$date\": \"1970-01-05T00:00:00Z\"}]" + " }, " + " {\"$mod\": [" + " {\"$subtract\": [" + " \"$field\", " + " {\"$date\": \"1970-01-05T00:00:00Z\"}]" + " }, " + " {\"$literal\": 1000}]" + " }]" + " }, " + " {\"$literal\": 1000}]" + " }, 
" + " {\"$literal\": 604800}]" + " }]" + " }, " + " {\"$literal\": 604800}]" + " }]" + " }]" + " }, " + " \"_id\": 0 " + " }" + "}").toJson(), ((BsonDocument) operations2.get(0)).toJson()); final String floorDayQuery3 = String.format( "SELECT FLOOR(\"field\" TO QUARTER)" + " FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context3 = queryMapper.get(floorDayQuery3); Assertions.assertNotNull(context3); final List<Bson> operations3 = context3.getAggregateOperations(); Assertions.assertEquals(1, operations3.size()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\":" + " {\"EXPR$0\": " + " {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 3]}," + " {\"$dateFromString\": {\"dateString\": {\"$dateToString\": {\"date\": \"$field\", \"format\": \"%Y-01-01T00:00:00Z\"}}}}," + " {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 6]}, " + " {\"$dateFromString\": {\"dateString\": {\"$dateToString\": {\"date\": \"$field\", \"format\": \"%Y-04-01T00:00:00Z\"}}}}," + " {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 9]}," + " {\"$dateFromString\": {\"dateString\": {\"$dateToString\": {\"date\": \"$field\", \"format\": \"%Y-07-01T00:00:00Z\"}}}}," + " {\"$cond\": [{\"$lte\": [{\"$month\": \"$field\"}, 12]}," + " {\"$dateFromString\": {\"dateString\": {\"$dateToString\": {\"date\": \"$field\", \"format\": \"%Y-10-01T00:00:00Z\"}}}}," + " null]}]}]}]}, " + " \"_id\": 0 }}").toJson(), ((BsonDocument) operations3.get(0)).toJson()); } @Test @DisplayName("Tests MONTHNAME in WHERE clause.") void testWhereMonthName() throws SQLException { final String dayNameQuery = String.format( "SELECT * FROM \"%s\".\"%s\" WHERE MONTHNAME(\"field\") = 'February'", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(dayNameQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(4, operations.size()); 
Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + "\"field\": 1, " + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$and\": [{\"$gt\": [" + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 1]}, \"January\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 2]}, \"February\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 3]}, \"March\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 4]}, \"April\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 5]}, \"May\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 6]}, \"June\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 7]}, \"July\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 8]}, \"August\", {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 9]}, \"September\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 10]}, \"October\", {\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 11]}, \"November\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 12]}, \"December\", null]}]}]}]}]}]}]}]}]}]}]}]}, null]}, " + "{\"$gt\": [{\"$literal\": \"February\"}, null]}]}, " + "{\"$eq\": [{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 1]}, \"January\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 2]}, \"February\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 3]}, \"March\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 4]}, \"April\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 5]}, \"May\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 6]}, \"June\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 7]}, \"July\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 8]}, \"August\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 9]}, \"September\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 10]}, \"October\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 11]}, \"November\", " + "{\"$cond\": [{\"$eq\": [{\"$month\": \"$field\"}, 12]}, 
\"December\", null]}]}]}]}]}]}]}]}]}]}]}]}, " + "{\"$literal\": \"February\"}]}, null]}}}"), operations.get(0)); Assertions.assertEquals(BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), operations.get(1)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), operations.get(2)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations.get(3)); } @Test @DisplayName("Tests DAYNAME in WHERE clause.") void testWhereDayName() throws SQLException { final String dayNameQuery = String.format( "SELECT * FROM \"%s\".\"%s\" WHERE DAYNAME(\"field\") = 'Tuesday'", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(dayNameQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(4, operations.size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + "\"field\": 1, " + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$and\": [{\"$gt\": [" + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 1]}, \"Sunday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 2]}, \"Monday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 3]}, \"Tuesday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 4]}, \"Wednesday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 5]}, \"Thursday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 6]}, \"Friday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 7]}, \"Saturday\", null]}]}]}]}]}]}]}, null]}, " + "{\"$gt\": [{\"$literal\": \"Tuesday\"}, null]}]}, " + "{\"$eq\": [{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 1]}, \"Sunday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 2]}, \"Monday\", " + 
"{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 3]}, \"Tuesday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 4]}, \"Wednesday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 5]}, \"Thursday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 6]}, \"Friday\", " + "{\"$cond\": [{\"$eq\": [{\"$dayOfWeek\": \"$field\"}, 7]}, \"Saturday\", null]}]}]}]}]}]}]}, " + "{\"$literal\": \"Tuesday\"}]}, null]}}}"), operations.get(0)); Assertions.assertEquals(BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), operations.get(1)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), operations.get(2)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations.get(3)); } @Test @DisplayName("Tests CURRENT_DATE in WHERE clause.") void testWhereCurrentDate() throws SQLException { final String dayNameQuery = String.format( "SELECT * FROM \"%s\".\"%s\" WHERE \"field\" <> CURRENT_DATE", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(dayNameQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(2, operations.size()); final BsonDocument rootDoc = context.getAggregateOperations() .get(0).toBsonDocument(BsonDocument.class, null); Assertions.assertNotNull(rootDoc); final BsonDocument matchDoc = rootDoc.getDocument("$match"); Assertions.assertNotNull(matchDoc); final BsonDateTime cstDateTime = matchDoc.getDocument("field").getArray("$nin").get(1).asDateTime(); Assertions.assertNotNull(cstDateTime); Assertions.assertEquals(BsonDocument.parse( "{\"$match\": {\"field\": { \"$nin\": [null, {\"$date\": {\"$numberLong\": \"" + cstDateTime.getValue() + "\"}}]}}}"), operations.get(0)); Assertions.assertEquals(BsonDocument.parse( 
"{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations.get(1)); } @Test @DisplayName("Tests CURRENT_TIME in WHERE clause.") void testWhereCurrentTime() throws SQLException { final String dayNameQuery = String.format( "SELECT * FROM \"%s\".\"%s\" WHERE CAST(\"field\" AS TIME) <> CURRENT_TIME", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(dayNameQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(2, operations.size()); final BsonDocument rootDoc = context.getAggregateOperations() .get(0).toBsonDocument(BsonDocument.class, null); Assertions.assertNotNull(rootDoc); final BsonDocument matchDoc = rootDoc.getDocument("$match"); Assertions.assertNotNull(matchDoc); final BsonDateTime cstDateTime = matchDoc.getDocument("field").getArray("$nin").get(1).asDateTime(); Assertions.assertNotNull(cstDateTime); Assertions.assertEquals(BsonDocument.parse( "{\"$match\": {\"field\": { \"$nin\": [null, {\"$date\": {\"$numberLong\": \"" + cstDateTime.getValue() + "\"}}]}}}"), operations.get(0)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations.get(1)); } @Test @DisplayName("Tests CURRENT_TIMESTAMP in WHERE clause.") void testWhereCurrentTimestamp() throws SQLException { final String dayNameQuery = String.format( "SELECT * FROM \"%s\".\"%s\" WHERE CAST(\"field\" as TIMESTAMP) <> CURRENT_TIMESTAMP", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(dayNameQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(2, operations.size()); final BsonDocument rootDoc = context.getAggregateOperations() .get(0).toBsonDocument(BsonDocument.class, null); Assertions.assertNotNull(rootDoc); final 
BsonDocument matchDoc = rootDoc.getDocument("$match"); Assertions.assertNotNull(matchDoc); final BsonDateTime cstDateTime = matchDoc.getDocument("field").getArray("$nin").get(1).asDateTime(); Assertions.assertNotNull(cstDateTime); Assertions.assertEquals(BsonDocument.parse( "{\"$match\": {\"field\": { \"$nin\": [null, {\"$date\": {\"$numberLong\": \"" + cstDateTime.getValue() + "\"}}]}}}"), operations.get(0)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations.get(1)); } @Test @DisplayName("Tests date extract in WHERE clause.") void testWhereExtract() throws SQLException { final String dayNameQuery = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE EXTRACT(YEAR FROM \"field\") = 2021", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(dayNameQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(4, operations.size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + "\"field\": 1, " + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$year\": \"$field\"}, null]}, " + "{\"$gt\": [ {\"$literal\": {\"$numberLong\": \"2021\"}}, null]}]}, {\"$eq\": [{\"$year\": \"$field\"}, {\"$literal\": {\"$numberLong\": \"2021\"}}]}, null]}}}"), operations.get(0)); Assertions.assertEquals(BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), operations.get(1)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), operations.get(2)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations.get(3)); } @Test @DisplayName("Tests TIMESTAMPADD in WHERE clause.") void testWhereTimestampAdd() throws 
SQLException { final String dayNameQuery = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE TIMESTAMPADD(DAY, 3, \"field\") = '2020-01-04'", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(dayNameQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(4, operations.size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + "\"field\": 1," + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$add\": [\"$field\", {\"$literal\": {\"$numberLong\": \"259200000\"}}]}, null]}, " + "{\"$gt\": [{\"$date\": \"2020-01-04T00:00:00Z\"}, null]}]}, " + "{\"$eq\": [{\"$add\": [\"$field\", {\"$literal\": {\"$numberLong\": \"259200000\"}}]}, {\"$date\": \"2020-01-04T00:00:00Z\"}]}, null]}}}"), operations.get(0)); Assertions.assertEquals(BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), operations.get(1)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), operations.get(2)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations.get(3)); } @Test @DisplayName("Tests TIMESTAMPADD on right in WHERE clause.") void testWhereTimestampAddOnRightOrLeft() throws SQLException { final String dayNameQuery1 = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE \"field\" <= TIMESTAMPADD(DAY, -3, CURRENT_DATE)", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context1 = queryMapper.get(dayNameQuery1); Assertions.assertNotNull(context1); final List<Bson> operations1 = context1.getAggregateOperations(); Assertions.assertEquals(2, operations1.size()); final BsonDocument rootDoc1 = operations1.get(0).toBsonDocument(); final BsonDocument matchDoc1 = 
rootDoc1.getDocument("$match"); final BsonDateTime currDate1 = matchDoc1 .getDocument("field") .getDateTime("$lte"); Assertions.assertNotNull(currDate1); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"field\": {\"$lte\": {\"$date\": {\"$numberLong\": \"" + currDate1.getValue() + "\"}}}}}").toJson(), operations1.get(0).toBsonDocument().toJson()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations1.get(1)); final String dayNameQuery2 = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE \"field\" <= TIMESTAMPADD(DAY, -3, CURRENT_TIMESTAMP)", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context2 = queryMapper.get(dayNameQuery2); Assertions.assertNotNull(context2); final List<Bson> operations2 = context2.getAggregateOperations(); Assertions.assertEquals(2, operations2.size()); final BsonDocument rootDoc2 = operations2.get(0).toBsonDocument(); final BsonDocument matchDoc2 = rootDoc2.getDocument("$match"); final BsonDateTime currDate2 = matchDoc2 .getDocument("field") .getDateTime("$lte"); Assertions.assertNotNull(currDate2); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"field\": {\"$lte\": {\"$date\": {\"$numberLong\": \"" + currDate2.getValue() + "\"}}}}}").toJson(), operations2.get(0).toBsonDocument().toJson()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations2.get(1)); final String dayNameQuery3 = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE TIMESTAMPADD(DAY, -3, CURRENT_DATE) >= \"field\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context3 = queryMapper.get(dayNameQuery3); Assertions.assertNotNull(context3); final List<Bson> operations3 = context3.getAggregateOperations(); Assertions.assertEquals(2, operations3.size()); final BsonDocument rootDoc3 = 
operations3.get(0).toBsonDocument(); final BsonDocument matchDoc3 = rootDoc3.getDocument("$match"); final BsonDateTime currDate3 = matchDoc3 .getDocument("field") .getDateTime("$lt"); Assertions.assertNotNull(currDate3); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"field\": {\"$lt\": {\"$date\": {\"$numberLong\": \"" + currDate3.getValue() + "\"}}}}}").toJson(), operations3.get(0).toBsonDocument().toJson()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations3.get(1)); final String dayNameQuery4 = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE TIMESTAMPADD(DAY, -3, CURRENT_TIMESTAMP) >= \"field\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context4 = queryMapper.get(dayNameQuery4); Assertions.assertNotNull(context4); final List<Bson> operations4 = context4.getAggregateOperations(); Assertions.assertEquals(2, operations4.size()); final BsonDocument rootDoc4 = operations4.get(0).toBsonDocument(); final BsonDocument matchDoc4 = rootDoc4.getDocument("$match"); final BsonDateTime currDate4 = matchDoc4 .getDocument("field") .getDateTime("$lt"); Assertions.assertNotNull(currDate4); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"field\": {\"$lt\": {\"$date\": {\"$numberLong\": \"" + currDate4.getValue() + "\"}}}}}").toJson(), operations4.get(0).toBsonDocument().toJson()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations4.get(1)); final String dayNameQuery5 = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE \"field\" <= TIMESTAMPADD(DAY, -3, DATE '2020-01-04')", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context5 = queryMapper.get(dayNameQuery5); Assertions.assertNotNull(context5); final List<Bson> operations5 = context5.getAggregateOperations(); Assertions.assertEquals(2, 
operations5.size()); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"field\": {\"$lte\": {\"$date\": \"2020-01-01T00:00:00Z\"}}}}}").toJson(), operations5.get(0).toBsonDocument().toJson()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations5.get(1)); final String dayNameQuery6 = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE TIMESTAMPADD(DAY, -3, DATE '2020-01-04') >= \"field\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context6 = queryMapper.get(dayNameQuery6); Assertions.assertNotNull(context6); final List<Bson> operations6 = context6.getAggregateOperations(); Assertions.assertEquals(2, operations6.size()); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"field\": {\"$lt\": {\"$date\": \"2020-01-01T00:00:00Z\"}}}}}").toJson(), operations6.get(0).toBsonDocument().toJson()); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations6.get(1)); } @Test @DisplayName("Tests TIMESTAMPDIFF in WHERE clause.") void testWhereTimestampDiff() throws SQLException { final String dayNameQuery = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE TIMESTAMPDIFF(DAY, \"field\", \"field\") = 0", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext context = queryMapper.get(dayNameQuery); Assertions.assertNotNull(context); final List<Bson> operations = context.getAggregateOperations(); Assertions.assertEquals(4, operations.size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + "\"field\": 1, " + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$divide\": [{\"$subtract\": [{\"$subtract\": [\"$field\", \"$field\"]}, " + "{\"$mod\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$literal\": 86400000}]}]}, {\"$literal\": 86400000}]}, null]}, " + 
"{\"$gt\": [{\"$literal\": 0}, null]}]}, {\"$eq\": [{\"$divide\": [{\"$subtract\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$mod\": [{\"$subtract\": [\"$field\", \"$field\"]}, {\"$literal\": 86400000}]}]}, {\"$literal\": 86400000}]}, {\"$literal\": 0}]}, null]}}}"), operations.get(0)); Assertions.assertEquals(BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), operations.get(1)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), operations.get(2)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"dateTestCollection__id\": \"$_id\", \"field\": \"$field\", \"_id\": 0}}"), operations.get(3)); } }
4,535
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query/DocumentDbMqlQueryContextTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.query;

import org.bson.BsonDocument;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Unit tests for {@link DocumentDbMqlQueryContext}, verifying the round trip from
 * BSON aggregate-pipeline stages to their string representations.
 */
public class DocumentDbMqlQueryContextTest {

    /**
     * Builds a context from a representative set of aggregation stages (generic stage,
     * 3-valued-logic conditionals, varied BSON types including non-ASCII text and binary,
     * and deeply nested operators), then asserts that
     * {@link DocumentDbMqlQueryContext#getAggregateOperationsAsStrings()} returns the
     * original JSON for each stage, in order.
     */
    @Test
    @DisplayName("Tests that aggregate operations are correctly converted from BSON document to string in getter.")
    void testGetAggregateOperationsAsStrings() {
        final List<String> stages = new ArrayList<>();
        // Generic stage.
        stages.add(
                "{\"$unwind\": {"
                        + "\"path\": \"$array\", "
                        + "\"includeArrayIndex\": \"array_index_lvl_0\", "
                        + "\"preserveNullAndEmptyArrays\": true}}");
        // Stage with 3-valued logic (many null checks).
        stages.add(
                "{\"$project\": {"
                        + "\"booleanField\": "
                        + "{\"$cond\": [{\"$and\": [{\"$gt\": [\"$array.field\", null]}, "
                        + "{\"$gt\": [\"$array.field2\", null]}]}, "
                        + "{\"$eq\": [\"$array.field\", \"$array.field2\"]}, null]}}}");
        // Stage with different Bson types.
        stages.add(
                "{\"$project\": {"
                        + "\"literalNull\": {\"$literal\": null}, "
                        + "\"literalTimestamp\": {\"$date\": {\"$numberLong\": \"1505938660000\"}}, "
                        + "\"literalInt\": {\"$literal\": {\"$numberInt\": \"-2147483648\"}}, "
                        + "\"literalDecimal\": {\"$literal\": {\"$numberDouble\": \"123.45\"}}, "
                        + "\"literalVarchar\": {\"$literal\": \"Hello! 你好!\"}, "
                        + "\"literalBinary\": {\"$binary\": {\"base64\": \"RfCr\", \"subType\": \"00\"}}}}");
        // Stage with a lot of nesting and aggregate operators.
        stages.add(
                "{\"$project\": {\"EXPR$0\": {\"$substrCP\": [\"$array.field\", "
                        + "{\"$subtract\": [\"$array.field2\", {\"$numberInt\": \"1\"}]}, "
                        + "{\"$subtract\": [\"$array.field1\", \"$array.field2\"]}]}, "
                        + "\"_id\": {\"$numberInt\": \"0\"}}}");

        final DocumentDbMqlQueryContext context = DocumentDbMqlQueryContext.builder()
                .aggregateOperations(
                        stages.stream().map(BsonDocument::parse).collect(Collectors.toList()))
                .build();

        // Hoist the getter call out of the loop: the result is loop-invariant, and
        // calling it once also makes the size/content assertions use the same snapshot.
        final List<String> actualStages = context.getAggregateOperationsAsStrings();
        Assertions.assertEquals(stages.size(), actualStages.size());
        for (int i = 0; i < stages.size(); i++) {
            Assertions.assertEquals(stages.get(i), actualStages.get(i));
        }
    }
}
4,536
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query/DocumentDbQueryMappingServiceMaxRowsTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.query;

import org.bson.BsonDocument;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension;

import java.sql.SQLException;

/**
 * Tests that the query mapping service appends a trailing {@code $limit} stage to the
 * aggregation pipeline when a max-rows value is passed, and that it composes correctly
 * with {@code ORDER BY} and explicit {@code LIMIT} clauses in the SQL.
 *
 * NOTE(review): several expected-pipeline literals previously had unbalanced trailing
 * braces (e.g. {@code "{\"$limit\": ...}}}}"}). They only passed because
 * {@code BsonDocument.parse} stops reading after the first complete document and ignores
 * trailing characters; the literals are now balanced JSON.
 */
@ExtendWith(DocumentDbFlapDoodleExtension.class)
public class DocumentDbQueryMappingServiceMaxRowsTest extends DocumentDbQueryMappingServiceTest {
    private static final String COLLECTION_NAME = "testCollection";
    private static DocumentDbQueryMappingService queryMapper;

    /**
     * Seeds the test collection with one document containing a nested array
     * (which the schema scanner virtualizes into a {@code _array} table) and
     * creates the query mapping service under test.
     *
     * @throws SQLException if the mapping service cannot be created.
     */
    @BeforeAll
    void initialize() throws SQLException {
        final BsonDocument document =
                BsonDocument.parse(
                        "{ \"_id\" : \"key\", \"array\" : [ { \"field\" : 1, \"field1\": \"value\" }, { \"field\" : 2, \"field2\" : \"value\" } ]}");
        insertBsonDocuments(COLLECTION_NAME, new BsonDocument[]{document});
        queryMapper = getQueryMappingService();
    }

    /**
     * A plain SELECT with max rows should produce only the projection plus a
     * final {@code $limit} equal to the max-rows value.
     */
    @Test
    @DisplayName("Tests $limit is produced when max rows is passed.")
    void testMaxRows() throws SQLException {
        final String query =
                String.format("SELECT * FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME);
        final DocumentDbMqlQueryContext result = queryMapper.get(query, 10);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(1, result.getColumnMetaData().size());
        Assertions.assertEquals(2, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse("{\"$project\": {\"testCollection__id\": '$_id', \"_id\": 0}}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"10\"}}"),
                result.getAggregateOperations().get(1));
    }

    /**
     * {@code ORDER BY} with max rows: the {@code $sort} stage is kept and the
     * max-rows {@code $limit} is appended after it.
     */
    @Test
    @DisplayName("Tests $limit and $sort are produced when max rows is set.")
    void testOrderByWithMaxRows() throws SQLException {
        final String queryWithAscendingSort =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\" ORDER BY \"%s\" ASC",
                        getDatabaseName(), COLLECTION_NAME + "_array", "field");
        final DocumentDbMqlQueryContext result = queryMapper.get(queryWithAscendingSort, 10);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(5, result.getColumnMetaData().size());
        Assertions.assertEquals(5, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{ \"$unwind\": {"
                                + "\"path\": \"$array\", "
                                + "\"includeArrayIndex\" : \"array_index_lvl_0\", "
                                + "\"preserveNullAndEmptyArrays\": true }}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"$or\": ["
                                + "{\"array.field\": {\"$exists\": true}}, "
                                + "{\"array.field1\": {\"$exists\": true}}, "
                                + "{\"array.field2\": {\"$exists\": true}}]}}"),
                result.getAggregateOperations().get(1));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": "
                                + "{\"testCollection__id\": \"$_id\", "
                                + "\"array_index_lvl_0\": \"$array_index_lvl_0\", "
                                + "\"field\": \"$array.field\", "
                                + "\"field1\": \"$array.field1\", "
                                + "\"field2\": \"$array.field2\", "
                                + "\"_id\": 0}}"),
                result.getAggregateOperations().get(2));
        Assertions.assertEquals(
                BsonDocument.parse("{ \"$sort\": {\"field\": 1 } }"),
                result.getAggregateOperations().get(3));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"10\"}}"),
                result.getAggregateOperations().get(4));
    }

    /**
     * {@code ORDER BY ... LIMIT 5} with max rows 10: both limits appear — the
     * query's own {@code $limit} first, then the max-rows {@code $limit}.
     */
    @Test
    @DisplayName("Tests $limit and $sort are produced when max rows is set and the query has a limit.")
    void testOrderByWithMaxRowsAndLimit() throws SQLException {
        final String queryWithAscendingSort =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\" ORDER BY \"%s\" ASC LIMIT 5",
                        getDatabaseName(), COLLECTION_NAME + "_array", "field");
        final DocumentDbMqlQueryContext result = queryMapper.get(queryWithAscendingSort, 10);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(5, result.getColumnMetaData().size());
        Assertions.assertEquals(6, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{ \"$unwind\": {"
                                + "\"path\": \"$array\", "
                                + "\"includeArrayIndex\" : \"array_index_lvl_0\", "
                                + "\"preserveNullAndEmptyArrays\": true }}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"$or\": ["
                                + "{\"array.field\": {\"$exists\": true}}, "
                                + "{\"array.field1\": {\"$exists\": true}}, "
                                + "{\"array.field2\": {\"$exists\": true}}]}}"),
                result.getAggregateOperations().get(1));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": "
                                + "{\"testCollection__id\": \"$_id\", "
                                + "\"array_index_lvl_0\": \"$array_index_lvl_0\", "
                                + "\"field\": \"$array.field\", "
                                + "\"field1\": \"$array.field1\", "
                                + "\"field2\": \"$array.field2\", "
                                + "\"_id\": 0}}"),
                result.getAggregateOperations().get(2));
        Assertions.assertEquals(
                BsonDocument.parse("{ \"$sort\": {\"field\": 1 } }"),
                result.getAggregateOperations().get(3));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"5\"}}"),
                result.getAggregateOperations().get(4));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"10\"}}"),
                result.getAggregateOperations().get(5));
    }

    /**
     * A subquery {@code LIMIT 20} plus outer {@code ORDER BY} with max rows 10:
     * the inner limit stays in place before the outer projection/sort, and the
     * max-rows {@code $limit} is appended last.
     */
    @Test
    @DisplayName("Tests $limit and $sort are produced when max rows is set and the subquery has a limit.")
    void testOrderByWithMaxRowsAndInnerLimit() throws SQLException {
        final String queryWithAscendingSort =
                String.format(
                        "SELECT * FROM ( SELECT * FROM \"%s\".\"%s\" LIMIT 20 ) ORDER BY \"%s\" ASC",
                        getDatabaseName(), COLLECTION_NAME + "_array", "field");
        final DocumentDbMqlQueryContext result = queryMapper.get(queryWithAscendingSort, 10);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(5, result.getColumnMetaData().size());
        Assertions.assertEquals(7, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{ \"$unwind\": {"
                                + "\"path\": \"$array\", "
                                + "\"includeArrayIndex\" : \"array_index_lvl_0\", "
                                + "\"preserveNullAndEmptyArrays\": true }}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"$or\": ["
                                + "{\"array.field\": {\"$exists\": true}}, "
                                + "{\"array.field1\": {\"$exists\": true}}, "
                                + "{\"array.field2\": {\"$exists\": true}}]}}"),
                result.getAggregateOperations().get(1));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": "
                                + "{\"testCollection__id\": \"$_id\", "
                                + "\"array_index_lvl_0\": \"$array_index_lvl_0\", "
                                + "\"field\": \"$array.field\", "
                                + "\"field1\": \"$array.field1\", "
                                + "\"field2\": \"$array.field2\", "
                                + "\"_id\": 0}}"),
                result.getAggregateOperations().get(2));
        // Fixed: literal previously ended with "}}}}" (unbalanced braces).
        Assertions.assertEquals(
                BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"20\"}}"),
                result.getAggregateOperations().get(3));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": "
                                + "{\"testCollection__id\": \"$testCollection__id\", "
                                + "\"array_index_lvl_0\": \"$array_index_lvl_0\", "
                                + "\"field\": \"$field\", "
                                + "\"field1\": \"$field1\", "
                                + "\"field2\": \"$field2\", "
                                + "\"_id\": 0}}"),
                result.getAggregateOperations().get(4));
        Assertions.assertEquals(
                BsonDocument.parse("{ \"$sort\": {\"field\": 1 } }"),
                result.getAggregateOperations().get(5));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"10\"}}"),
                result.getAggregateOperations().get(6));
    }

    /**
     * Inner {@code LIMIT 20} and outer {@code LIMIT 30} with max rows 10: all three
     * limits appear in pipeline order, the max-rows {@code $limit} last.
     */
    @Test
    @DisplayName("Tests $limit and $sort are produced when max rows is set and the sub query and the query has limit.")
    void testOrderByWithMaxRowsAndInnerLimitAndOuterLimit() throws SQLException {
        final String queryWithAscendingSort =
                String.format(
                        "SELECT * FROM (SELECT * FROM \"%s\".\"%s\" LIMIT 20) ORDER BY \"%s\" ASC LIMIT 30",
                        getDatabaseName(), COLLECTION_NAME + "_array", "field");
        final DocumentDbMqlQueryContext result = queryMapper.get(queryWithAscendingSort, 10);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(5, result.getColumnMetaData().size());
        Assertions.assertEquals(8, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{ \"$unwind\": {"
                                + "\"path\": \"$array\", "
                                + "\"includeArrayIndex\" : \"array_index_lvl_0\", "
                                + "\"preserveNullAndEmptyArrays\": true }}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"$or\": ["
                                + "{\"array.field\": {\"$exists\": true}}, "
                                + "{\"array.field1\": {\"$exists\": true}}, "
                                + "{\"array.field2\": {\"$exists\": true}}]}}"),
                result.getAggregateOperations().get(1));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": "
                                + "{\"testCollection__id\": \"$_id\", "
                                + "\"array_index_lvl_0\": \"$array_index_lvl_0\", "
                                + "\"field\": \"$array.field\", "
                                + "\"field1\": \"$array.field1\", "
                                + "\"field2\": \"$array.field2\", "
                                + "\"_id\": 0}}"),
                result.getAggregateOperations().get(2));
        // Fixed: literal previously ended with "}}}" (unbalanced braces).
        Assertions.assertEquals(
                BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"20\"}}"),
                result.getAggregateOperations().get(3));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": "
                                + "{\"testCollection__id\": \"$testCollection__id\", "
                                + "\"array_index_lvl_0\": \"$array_index_lvl_0\", "
                                + "\"field\": \"$field\", "
                                + "\"field1\": \"$field1\", "
                                + "\"field2\": \"$field2\", "
                                + "\"_id\": 0}}"),
                result.getAggregateOperations().get(4));
        Assertions.assertEquals(
                BsonDocument.parse("{ \"$sort\": {\"field\": 1 } }"),
                result.getAggregateOperations().get(5));
        // Fixed: literal previously ended with "}}}" (unbalanced braces).
        Assertions.assertEquals(
                BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"30\"}}"),
                result.getAggregateOperations().get(6));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"10\"}}"),
                result.getAggregateOperations().get(7));
    }

    /**
     * An {@code ORDER BY} that only appears in the subquery (with no limit anywhere)
     * is dropped from the pipeline; only the max-rows {@code $limit} is appended.
     */
    @Test
    @DisplayName("Tests $limit is produced and $sort is omitted when max rows is set and the sub query and the query has no limit.")
    void testOrderByWithMaxRowsAndInnerOrderBy() throws SQLException {
        final String queryWithAscendingSort =
                String.format(
                        "SELECT * FROM (SELECT * FROM \"%s\".\"%s\" ORDER BY \"%s\" ASC) ",
                        getDatabaseName(), COLLECTION_NAME + "_array", "field");
        final DocumentDbMqlQueryContext result = queryMapper.get(queryWithAscendingSort, 10);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(5, result.getColumnMetaData().size());
        Assertions.assertEquals(4, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{ \"$unwind\": {"
                                + "\"path\": \"$array\", "
                                + "\"includeArrayIndex\" : \"array_index_lvl_0\", "
                                + "\"preserveNullAndEmptyArrays\": true }}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"$or\": ["
                                + "{\"array.field\": {\"$exists\": true}}, "
                                + "{\"array.field1\": {\"$exists\": true}}, "
                                + "{\"array.field2\": {\"$exists\": true}}]}}"),
                result.getAggregateOperations().get(1));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": "
                                + "{\"testCollection__id\": \"$_id\", "
                                + "\"array_index_lvl_0\": \"$array_index_lvl_0\", "
                                + "\"field\": \"$array.field\", "
                                + "\"field1\": \"$array.field1\", "
                                + "\"field2\": \"$array.field2\", "
                                + "\"_id\": 0}}"),
                result.getAggregateOperations().get(2));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"10\"}}"),
                result.getAggregateOperations().get(3));
    }
}
4,537
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query/DocumentDbQueryMappingServiceBasicTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.query; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.bson.BsonDateTime; import org.bson.BsonDocument; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbFilter; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import java.sql.SQLException; import java.time.Instant; @ExtendWith(DocumentDbFlapDoodleExtension.class) public class DocumentDbQueryMappingServiceBasicTest extends DocumentDbQueryMappingServiceTest { private static final String COLLECTION_NAME = "testCollection"; private static final String OTHER_COLLECTION_NAME = "otherTestCollection"; private static final String DATE_COLLECTION_NAME = "dateTestCollection"; private static final String NESTED_ID_COLLECTION_NAME = "nestedIdCollection"; private static final String NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME = "nestedArrayDocumentInANestedArrayCollection"; private static DocumentDbQueryMappingService queryMapper; @BeforeAll @SuppressFBWarnings(value = "HARD_CODE_PASSWORD", justification = "Hardcoded for test purposes only") void initialize() throws 
SQLException { final long dateTime = Instant.parse("2020-01-01T00:00:00.00Z").toEpochMilli(); final BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ " + "{ \"field\" : 1, \"field1\": \"value\" }, " + "{ \"field\" : 2, \"field2\" : \"value\" , \"field3\" : { \"field4\": 3} } ]}"); final BsonDocument otherDocument = BsonDocument.parse( "{ \"_id\" : \"key1\", \"otherArray\" : [ { \"field\" : 1, \"field3\": \"value\" }, { \"field\" : 2, \"field3\" : \"value\" } ]}"); final BsonDocument doc1 = BsonDocument.parse("{\"_id\": 101}"); doc1.append("field", new BsonDateTime(dateTime)); final BsonDocument nestedIddocument = BsonDocument.parse( "{ \"_id\" : \"key\", \"document\" : { \"_id\" : 1, \"field1\": \"value\" } }"); final BsonDocument nestedArrayDocumentInANestedArray = BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ " + "{ \"array2\" : [ { \"_id\" : 1, \"field1\": \"value\" } ] } ] }"); insertBsonDocuments(COLLECTION_NAME, new BsonDocument[]{document}); insertBsonDocuments(OTHER_COLLECTION_NAME, new BsonDocument[]{otherDocument}); insertBsonDocuments(DATE_COLLECTION_NAME, new BsonDocument[]{doc1}); insertBsonDocuments(NESTED_ID_COLLECTION_NAME, new BsonDocument[]{nestedIddocument}); insertBsonDocuments(NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME, new BsonDocument[]{nestedArrayDocumentInANestedArray}); queryMapper = getQueryMappingService(); } @Test @DisplayName("Tests that select works for querying single base or virtual tables.") void testQueryWithSelect() throws SQLException { // Get the base table. 
final String basicQuery = String.format("SELECT * FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); DocumentDbMqlQueryContext result = queryMapper.get(basicQuery); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(1, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse("{\"$project\": {\"testCollection__id\": '$_id', \"_id\": 0}}"), result.getAggregateOperations().get(0)); // Get the nested table. final String nestedTableQuery = String.format("SELECT * FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME + "_array"); result = queryMapper.get(nestedTableQuery); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(3, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(2)); } @Test @DisplayName("Tests that project works for querying a single table.") void testQueryWithProject() throws SQLException { final String query = String.format( "SELECT \"%s\" FROM \"%s\".\"%s\"", "field", 
getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(3, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\":{\"field\": \"$array.field\", \"_id\": 0}}"), result.getAggregateOperations().get(2)); } @Test @DisplayName("Tests that distinct keyword works when querying a single table.") void testQueryWithDistinct() throws SQLException { final String query = String.format( "SELECT DISTINCT \"%s\" FROM \"%s\".\"%s\"", "field", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), 
result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse("{\"$group\": {\"_id\": \"$array.field\"}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse("{\"$project\": {\"field\": \"$_id\"}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests that as works when querying a single table.") void testQueryWithAs() throws SQLException { final String query = String.format( "SELECT \"%s\" AS \"renamed\" FROM \"%s\".\"%s\"", "field", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(3, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\":{\"renamed\": \"$array.field\", \"_id\": 0}}"), result.getAggregateOperations().get(2)); } @Test @DisplayName("Tests that where works with 1 or more conditions when querying a single table.") void testQueryWithWhere() throws SQLException { final String queryWithWhere = String.format( "SELECT * FROM \"%s\".\"%s\" WHERE \"%s\" = %s", getDatabaseName(), COLLECTION_NAME + "_array", "field", 1); DocumentDbMqlQueryContext result = queryMapper.get(queryWithWhere); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, 
result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"array.field\": {\"$eq\": 1}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": " + "{\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(3)); final String queryWithCompoundWhere = String.format( "SELECT * FROM \"%s\".\"%s\" WHERE \"%s\" = '%s' AND \"%s\" > %s", getDatabaseName(), COLLECTION_NAME + "_array", "field1", "value", "field", 0); result = queryMapper.get(queryWithCompoundWhere); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), 
result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$and\": [{\"array.field1\": {\"$eq\": \"value\"}}, {\"array.field\": {\"$gt\": 0}}]}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": " + "{\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests that limit works when querying a single table.") void testQueryWithLimit() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" LIMIT 1", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": " + "{\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"1\"}}"), 
result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests that order by works with 1 or more sort conditions in ascending and descending order.") void testQueryWithOrderBy() throws SQLException { final String queryWithAscendingSort = String.format( "SELECT * FROM \"%s\".\"%s\" ORDER BY \"%s\" ASC", getDatabaseName(), COLLECTION_NAME + "_array", "field"); DocumentDbMqlQueryContext result = queryMapper.get(queryWithAscendingSort); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": " + "{\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse("{ \"$sort\": {\"field\": 1 } }"), result.getAggregateOperations().get(3)); final String queryWithDescendingSort = String.format( "SELECT * FROM \"%s\".\"%s\" ORDER BY \"%s\" DESC", getDatabaseName(), COLLECTION_NAME + "_array", "field"); result = queryMapper.get(queryWithDescendingSort); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); 
Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": " + "{\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse("{ \"$sort\": {\"field\": -1 } }"), result.getAggregateOperations().get(3)); final String queryWithCompoundSort = String.format( "SELECT * FROM \"%s\".\"%s\" ORDER BY \"%s\" ASC, \"%s\" DESC", getDatabaseName(), COLLECTION_NAME + "_array", "field", "field1"); result = queryMapper.get(queryWithCompoundSort); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( 
"{\"$project\": " + "{\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse("{ \"$sort\": {\"field\": 1, \"field1\": -1 } }"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests that group by works when querying a single table.") void testQueryWithGroupBy() throws SQLException { final String queryWithGroupBy = String.format( "SELECT \"%s\", \"%s\", \"%s\" FROM \"%s\".\"%s\" GROUP BY \"%s\", \"%s\", \"%s\"", COLLECTION_NAME + "__id", "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array", COLLECTION_NAME + "__id", "field", "field1"); final DocumentDbMqlQueryContext result = queryMapper.get(queryWithGroupBy); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(3, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": {\"_id\": {\"testCollection__id\": \"$_id\", \"field\": \"$array.field\", \"field1\": \"$array.field1\"}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"_id\": 0, \"testCollection__id\": \"$_id.testCollection__id\", \"field\": \"$_id.field\", \"field1\": 
\"$_id.field1\"}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests that count, sum, min, max, and avg work when querying a single table.") void testQueryWithAggregateFunctions() throws SQLException { final String queryWithCount = String.format( "SELECT COUNT(\"%s\") FROM \"%s\".\"%s\"", "field1", getDatabaseName(), COLLECTION_NAME + "_array"); DocumentDbMqlQueryContext result = queryMapper.get(queryWithCount); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(3, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse("{\"$group\": {\"_id\": {}, \"EXPR$0\": {\"$sum\": {\"$cond\": [{\"$gt\": [\"$array.field1\", null]}, 1, 0]}}}}"), result.getAggregateOperations().get(2)); final String queryWithDistinctCount = String.format( "SELECT COUNT(DISTINCT \"%s\") FROM \"%s\".\"%s\"", "field1", getDatabaseName(), COLLECTION_NAME + "_array"); result = queryMapper.get(queryWithDistinctCount); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), 
result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": {\"_id\": {}, \"EXPR$0\": {\"$addToSet\": \"$array.field1\"}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 0, " + "\"EXPR$0\": {\"$size\": {\"$filter\": {\"input\": \"$EXPR$0\", " + "\"cond\": {\"$gt\": [\"$$this\", null]}}}}}}"), result.getAggregateOperations().get(3)); final String queryWithAverage = String.format( "SELECT AVG(\"%s\") FROM \"%s\".\"%s\"", "field", getDatabaseName(), COLLECTION_NAME + "_array"); result = queryMapper.get(queryWithAverage); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(3, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse("{\"$group\": {\"_id\": {}, \"EXPR$0\": {\"$avg\": \"$array.field\"}}}"), result.getAggregateOperations().get(2)); final String queryWithAverageDistinct = String.format( "SELECT AVG(DISTINCT \"%s\") FROM \"%s\".\"%s\"", "field", getDatabaseName(), COLLECTION_NAME + "_array"); result = queryMapper.get(queryWithAverageDistinct); 
Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": {\"_id\": {}, \"EXPR$0\": {\"$addToSet\": \"$array.field\"}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse("{\"$project\": {\"_id\": 0, \"EXPR$0\": {\"$avg\": \"$EXPR$0\"}}}"), result.getAggregateOperations().get(3)); final String queryWithSum = String.format( "SELECT SUM(\"%s\") FROM \"%s\".\"%s\"", "field", getDatabaseName(), COLLECTION_NAME + "_array"); result = queryMapper.get(queryWithSum); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": 
{\"_id\": {}, \"EXPR$0\": {\"$push\": \"$array.field\"}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 0, " + "\"EXPR$0\": {\"$cond\": [{\"$gt\": [{\"$size\": {\"$filter\": {\"input\": \"$EXPR$0\", " + "\"cond\": {\"$gt\": [\"$$this\", null]}}}}, 0]}, {\"$sum\": \"$EXPR$0\"}, null]}}}"), result.getAggregateOperations().get(3)); final String queryWithSumDistinct = String.format( "SELECT SUM(DISTINCT \"%s\") FROM \"%s\".\"%s\"", "field", getDatabaseName(), COLLECTION_NAME + "_array"); result = queryMapper.get(queryWithSumDistinct); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": {\"_id\": {}, \"EXPR$0\": {\"$addToSet\": \"$array.field\"}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 0, " + "\"EXPR$0\": {\"$cond\": [{\"$gt\": [{\"$size\": {\"$filter\": {\"input\": \"$EXPR$0\", " + "\"cond\": {\"$gt\": [\"$$this\", null]}}}}, 0]}, {\"$sum\": \"$EXPR$0\"}, null]}}}"), result.getAggregateOperations().get(3)); final String queryWithMin = String.format( "SELECT MIN(\"%s\") FROM \"%s\".\"%s\"", "field", getDatabaseName(), COLLECTION_NAME + "_array"); result = queryMapper.get(queryWithMin); 
Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(3, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse("{\"$group\": {\"_id\": {}, \"EXPR$0\": {\"$min\": \"$array.field\"}}}"), result.getAggregateOperations().get(2)); final String queryWithMax = String.format( "SELECT MAX(\"%s\") FROM \"%s\".\"%s\"", "field", getDatabaseName(), COLLECTION_NAME + "_array"); result = queryMapper.get(queryWithMax); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(3, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse("{\"$group\": {\"_id\": {}, \"EXPR$0\": {\"$max\": \"$array.field\"}}}"), result.getAggregateOperations().get(2)); } @Test @DisplayName("Tests that arithmetic operators work when 
querying a single table.") void testQueryWithArithmeticOperators() throws SQLException { final String queryWithSum = String.format( "SELECT SUM(\"%s\") / COUNT(\"%s\") FROM \"%s\".\"%s\"", "field", "field", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(queryWithSum); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": {\"_id\": {}, \"_f0\": {\"$sum\": \"$array.field\"}, \"_f1\": {\"$sum\": {\"$cond\": [{\"$gt\": [\"$array.field\", null]}, 1, 0]}}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$divide\": [{\"$cond\": [{\"$cond\": [{\"$and\": [{\"$gt\": [\"$_f1\", null]}, {\"$gt\": [{\"$literal\": 0}, null]}]}, {\"$eq\": [\"$_f1\", {\"$literal\": 0}]}, null]}, null, \"$_f0\"]}, \"$_f1\"]}, \"_id\": 0}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests that having works when querying a single table.") void testQueryWithHaving() throws SQLException { final String queryWithHaving = String.format( "SELECT \"%s\", \"%s\", \"%s\" FROM \"%s\".\"%s\"" + "GROUP BY \"%s\", \"%s\", \"%s\" HAVING COUNT(*) > 1", COLLECTION_NAME + "__id", "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array", 
COLLECTION_NAME + "__id", "field", "field1"); final DocumentDbMqlQueryContext result = queryMapper.get(queryWithHaving); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(3, result.getColumnMetaData().size()); Assertions.assertEquals(6, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": {\"_id\": {\"testCollection__id\": \"$_id\", \"field\": \"$array.field\", \"field1\": \"$array.field1\"}, \"_f3\": {\"$sum\": 1}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"_id\": 0, \"testCollection__id\": \"$_id.testCollection__id\", \"field\": \"$_id.field\", \"field1\": \"$_id.field1\", \"_f3\": \"$_f3\"}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"_f3\": {\"$gt\": 1}}}"), result.getAggregateOperations().get(4)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"testCollection__id\": \"$testCollection__id\", \"field\": \"$field\", \"field1\": \"$field1\", \"_id\": 0}}"), result.getAggregateOperations().get(5)); } @Test @DisplayName("Tests that a statement with project, where, group by, having, order, and limit works for a single table.") void testComplexQuery() throws SQLException { final String complexQuery = String.format( "SELECT \"%s\", \"%s\" AS \"renamed\", COUNT(*) AS \"Total\" FROM \"%s\".\"%s\"" + "WHERE \"%s\" = 'key' GROUP BY 
\"%s\", \"%s\", \"%s\"" + "HAVING COUNT(*) > 1 ORDER BY \"renamed\" LIMIT 1", COLLECTION_NAME + "__id", "field", getDatabaseName(), COLLECTION_NAME + "_array", COLLECTION_NAME + "__id", COLLECTION_NAME + "__id", "field", "field1"); final DocumentDbMqlQueryContext result = queryMapper.get(complexQuery); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(3, result.getColumnMetaData().size()); Assertions.assertEquals(9, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"_id\": {\"$eq\": \"key\"}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": {" + "\"_id\": {\"testCollection__id\": \"$_id\", \"field\": \"$array.field\", \"field1\": \"$array.field1\"}, " + "\"Total\": {\"$sum\": 1}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"_id\": 0, \"testCollection__id\": \"$_id.testCollection__id\", \"field\": \"$_id.field\", \"field1\": \"$_id.field1\", \"Total\": \"$Total\"}}"), result.getAggregateOperations().get(4)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"Total\": {\"$gt\": 1}}}"), result.getAggregateOperations().get(5)); Assertions.assertEquals( BsonDocument.parse("{\"$sort\": {\"field\": 1}}"), result.getAggregateOperations().get(6)); Assertions.assertEquals( BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"1\"}}}"), 
        result.getAggregateOperations().get(7));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {\"testCollection__id\": \"$testCollection__id\", \"renamed\": \"$field\", \"Total\": \"$Total\", \"_id\": 0}}"),
                result.getAggregateOperations().get(8));
    }

    @Test
    @DisplayName("Tests that a statement with join works for two tables from the same collection.")
    void testSameCollectionJoin() throws SQLException {
        // INNER JOIN of a base table with its child array table: the join key is computed
        // in $addFields (null when no array row exists), then the array is unwound.
        final String innerJoin =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\""
                                + "INNER JOIN \"%s\".\"%s\""
                                + "ON \"%s\".\"%s\" = \"%s\".\"%s\"",
                        getDatabaseName(),
                        COLLECTION_NAME,
                        getDatabaseName(),
                        COLLECTION_NAME + "_array",
                        COLLECTION_NAME,
                        COLLECTION_NAME + "__id",
                        COLLECTION_NAME + "_array",
                        COLLECTION_NAME + "__id");
        final DocumentDbMqlQueryContext innerJoinResult = queryMapper.get(innerJoin);
        Assertions.assertNotNull(innerJoinResult);
        Assertions.assertEquals(COLLECTION_NAME, innerJoinResult.getCollectionName());
        Assertions.assertEquals(6, innerJoinResult.getColumnMetaData().size());
        Assertions.assertEquals(4, innerJoinResult.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$addFields\": {\"testCollection__id0\": {\"$cond\": [{\"$or\": [{\"$ifNull\": [\"$array.field\", false]}, "
                                + "{\"$ifNull\": [\"$array.field1\", false]}, {\"$ifNull\": [\"$array.field2\", false]}]}, \"$_id\", null]}, \"_id\": \"$_id\"}}"),
                innerJoinResult.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{ \"$unwind\": {"
                                + "\"path\": \"$array\", "
                                + "\"includeArrayIndex\" : \"array_index_lvl_0\", "
                                + "\"preserveNullAndEmptyArrays\": true }}"),
                innerJoinResult.getAggregateOperations().get(1));
        // Inner join keeps only rows where the child table actually has a value.
        Assertions.assertEquals(
                BsonDocument.parse("{\"$match\": {\"$or\": ["
                        + "{\"array.field\": {\"$exists\": true}}, "
                        + "{\"array.field1\": {\"$exists\": true}}, "
                        + "{\"array.field2\": {\"$exists\": true}}]}}"),
                innerJoinResult.getAggregateOperations().get(2));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {\"testCollection__id\": \"$_id\", \"testCollection__id0\": \"$testCollection__id0\", \"array_index_lvl_0\": \"$array_index_lvl_0\", \"field\": \"$array.field\", \"field1\": \"$array.field1\", \"field2\": \"$array.field2\", \"_id\": 0}}"),
                innerJoinResult.getAggregateOperations().get(3));
        // LEFT JOIN: same shape but without the existence $match stage (3 stages total).
        final String leftJoin =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\""
                                + "LEFT JOIN \"%s\".\"%s\""
                                + "ON \"%s\".\"%s\" = \"%s\".\"%s\"",
                        getDatabaseName(),
                        COLLECTION_NAME,
                        getDatabaseName(),
                        COLLECTION_NAME + "_array",
                        COLLECTION_NAME,
                        COLLECTION_NAME + "__id",
                        COLLECTION_NAME + "_array",
                        COLLECTION_NAME + "__id");
        final DocumentDbMqlQueryContext leftJoinResult = queryMapper.get(leftJoin);
        Assertions.assertNotNull(leftJoinResult);
        Assertions.assertEquals(COLLECTION_NAME, leftJoinResult.getCollectionName());
        Assertions.assertEquals(6, leftJoinResult.getColumnMetaData().size());
        Assertions.assertEquals(3, leftJoinResult.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$addFields\": {\"testCollection__id0\": {\"$cond\": [{\"$or\": [{\"$ifNull\": [\"$array.field\", false]}, "
                                + "{\"$ifNull\": [\"$array.field1\", false]}, {\"$ifNull\": [\"$array.field2\", false]}]}, \"$_id\", null]}, \"_id\": \"$_id\"}}"),
                leftJoinResult.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"),
                leftJoinResult.getAggregateOperations().get(1));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": "
                                + "{\"testCollection__id\": \"$_id\", "
                                + "\"testCollection__id0\": \"$testCollection__id0\", "
                                + "\"array_index_lvl_0\": \"$array_index_lvl_0\", "
                                + "\"field\": \"$array.field\", "
                                + "\"field1\": \"$array.field1\", "
                                + "\"field2\": \"$array.field2\", "
                                + "\"_id\": 0}}"),
                leftJoinResult.getAggregateOperations().get(2));
    }

    @Test
    @DisplayName("Tests that a statement with join works for three tables from the same collection.")
void testSameCollectionJoinWithTwoLevelNestedArray() { final String twoInnerJoins = String.format( "SELECT field1 FROM \"%s\".\"%s\"" + "INNER JOIN \"%s\".\"%s\"" + "ON \"%s\".\"%s\" = \"%s\".\"%s\"" + "INNER JOIN \"%s\".\"%s\" " + "ON \"%s\".\"%s\" = \"%s\".\"%s\"" + "AND \"%s\".\"%s\" = \"%s\".\"%s\"", getDatabaseName(), NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME, getDatabaseName(), NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME, NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "__id", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "__id", getDatabaseName(), NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array_array2", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array_array2", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "__id", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "__id", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array_array2", "array_index_lvl_0", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array", "array_index_lvl_0"); Assertions.assertDoesNotThrow(() -> queryMapper.get(twoInnerJoins)); } @Test @DisplayName("Tests that a statement with join works for two tables from the same collection using only _id.") void testSameCollectionJoinWithTwoLevelNestedArrayUsingOnlyPrimaryKeys() { final String innerJoinRootDocumentWithNestedNestedArray = String.format( "SELECT field1 FROM \"%s\".\"%s\"" + "INNER JOIN \"%s\".\"%s\"" + "ON \"%s\".\"%s\" = \"%s\".\"%s\"", getDatabaseName(), NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME, getDatabaseName(), NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array_array2", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME, NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "__id", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array_array2", NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + 
"__id"); Assertions.assertDoesNotThrow(() -> queryMapper.get(innerJoinRootDocumentWithNestedNestedArray)); } @Test @DisplayName("Tests that a statement with project, where, group by, having, order, and limit " + "works for tables from the same collection.") void testComplexQueryWithSameCollectionJoin() throws SQLException { final String complexQuery = String.format( "SELECT \"%s\" AS \"renamed\", COUNT(*) AS \"Total\" FROM \"%s\".\"%s\"" + "INNER JOIN \"%s\".\"%s\"" + "ON \"%s\".\"%s\" = \"%s\".\"%s\"" + "WHERE \"%s\" > 1 GROUP BY \"%s\".\"%s\", \"%s\", \"%s\"" + "HAVING COUNT(*) > 1 ORDER BY \"renamed\" LIMIT 1", "field", getDatabaseName(), COLLECTION_NAME, getDatabaseName(), COLLECTION_NAME + "_array", COLLECTION_NAME, COLLECTION_NAME + "__id", COLLECTION_NAME + "_array", COLLECTION_NAME + "__id", "field", COLLECTION_NAME, COLLECTION_NAME + "__id", "field", "field1"); final DocumentDbMqlQueryContext result = queryMapper.get(complexQuery); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(2, result.getColumnMetaData().size()); Assertions.assertEquals(10, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$addFields\": {\"testCollection__id0\": {\"$cond\": [{\"$or\": [{\"$ifNull\": [\"$array.field2\", false]}, " + "{\"$ifNull\": [\"$array.field\", false]}, {\"$ifNull\": [\"$array.field1\", false]}]}, \"$_id\", null]}, \"_id\": \"$_id\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field2\": {\"$exists\": true}}, {\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(2)); 
Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"array.field\": {\"$gt\": 1}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": {\"_id\": {\"testCollection__id\": \"$_id\", \"field\": \"$array.field\", \"field1\": \"$array.field1\"}, \"Total\": {\"$sum\": 1}}}"), result.getAggregateOperations().get(4)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"_id\": 0, \"testCollection__id\": \"$_id.testCollection__id\", \"field\": \"$_id.field\", \"field1\": \"$_id.field1\", \"Total\": \"$Total\"}}"), result.getAggregateOperations().get(5)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"Total\": {\"$gt\": 1}}}"), result.getAggregateOperations().get(6)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"renamed\": \"$field\", \"Total\": \"$Total\", \"_id\": 0}}"), result.getAggregateOperations().get(7)); Assertions.assertEquals( BsonDocument.parse("{\"$sort\": {\"renamed\": 1}}"), result.getAggregateOperations().get(8)); Assertions.assertEquals( BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"1\"}}"), result.getAggregateOperations().get(9)); } @Test @DisplayName("Tests that a valid query that cannot be executed purely with aggregate throws an exception.") void testUnsupportedQuery() { // Union requires 2 separate calls. final String query = String.format("SELECT * FROM \"%s\".\"%s\" UNION SELECT \"%s\" FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME, COLLECTION_NAME + "__id", getDatabaseName(), COLLECTION_NAME + "_array"); Assertions.assertEquals(SqlError.lookup(SqlError.UNSUPPORTED_SQL, query), Assertions.assertThrows(SQLException.class, () -> queryMapper.get(query)) .getMessage()); } @Test @DisplayName("Tests that an invalid query throws an exception.") void testInvalidQuery() { // Column counts here are mismatched so this is invalid sql. 
        final String query =
                String.format("SELECT * FROM \"%s\".\"%s\" UNION SELECT * FROM \"%s\".\"%s\"",
                        getDatabaseName(), COLLECTION_NAME, getDatabaseName(), COLLECTION_NAME + "_array");
        // Expects the Calcite validation error to be surfaced verbatim in the SQLException.
        Assertions.assertEquals(String.format("Unable to parse SQL"
                        + " 'SELECT * FROM \"database\".\"testCollection\" UNION SELECT * FROM \"database\".\"testCollection_array\"'. --"
                        + " Reason: 'At line 1, column 56: Column count mismatch in UNION'"),
                Assertions.assertThrows(SQLException.class, () -> queryMapper.get(query))
                        .getMessage());
    }

    @Test
    @DisplayName("Tests a simple query with a join between 2 different collections.")
    void testDifferentCollectionJoin() throws SQLException {
        // Cross-collection INNER JOIN becomes a $lookup whose sub-pipeline unwinds the
        // other collection's array and matches on the equi-join key.
        final String innerJoin =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\""
                                + "INNER JOIN \"%s\".\"%s\""
                                + "ON \"%s\".\"%s\" = \"%s\".\"%s\"",
                        getDatabaseName(),
                        COLLECTION_NAME + "_array",
                        getDatabaseName(),
                        OTHER_COLLECTION_NAME + "_otherArray",
                        COLLECTION_NAME + "_array",
                        COLLECTION_NAME + "__id",
                        OTHER_COLLECTION_NAME + "_otherArray",
                        OTHER_COLLECTION_NAME + "__id");
        DocumentDbMqlQueryContext result = queryMapper.get(innerJoin);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(9, result.getColumnMetaData().size());
        Assertions.assertEquals(5, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{ \"$unwind\": {"
                                + "\"path\": \"$array\", "
                                + "\"includeArrayIndex\" : \"array_index_lvl_0\", "
                                + "\"preserveNullAndEmptyArrays\": true }}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"$or\": "
                                + "[{\"array.field\": {\"$exists\": true}}, "
                                + "{\"array.field1\": {\"$exists\": true}}, "
                                + "{\"array.field2\": {\"$exists\": true}}] }}"),
                result.getAggregateOperations().get(1));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$lookup\": {"
                                + "\"from\": \"otherTestCollection\", "
                                + "\"let\": {\"field1\": \"$array.field1\", \"field\": \"$array.field\", \"array_index_lvl_0\": \"$array_index_lvl_0\", \"field2\": \"$array.field2\", \"testCollection__id\": \"$_id\"}, "
                                + "\"pipeline\": ["
                                + "{\"$unwind\": {\"path\": \"$otherArray\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"otherArray_index_lvl_0\"}}, "
                                + "{\"$match\": {\"$or\": [{\"otherArray.field\": {\"$exists\": true}}, {\"otherArray.field3\": {\"$exists\": true}}]}}, "
                                + "{\"$match\": {\"$expr\": {\"$eq\": [\"$$testCollection__id\", \"$_id\"]}}}], "
                                + "\"as\": \"otherTestCollection_otherArray\"}}"),
                result.getAggregateOperations().get(2));
        // preserveNullAndEmptyArrays=false here implements INNER join semantics.
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$unwind\": {\"path\": \"$otherTestCollection_otherArray\", \"preserveNullAndEmptyArrays\": false}}"),
                result.getAggregateOperations().get(3));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"testCollection__id\": \"$_id\", "
                                + "\"array_index_lvl_0\": \"$array_index_lvl_0\", "
                                + "\"field\": \"$array.field\", "
                                + "\"field1\": \"$array.field1\", "
                                + "\"field2\": \"$array.field2\", "
                                + "\"otherTestCollection__id\": \"$otherTestCollection_otherArray._id\", "
                                + "\"otherArray_index_lvl_0\": \"$otherTestCollection_otherArray.otherArray_index_lvl_0\", "
                                + "\"field0\": \"$otherTestCollection_otherArray.otherArray.field\", "
                                + "\"field3\": \"$otherTestCollection_otherArray.otherArray.field3\", "
                                + "\"_id\": 0}}"),
                result.getAggregateOperations().get(4));
        // LEFT JOIN: identical pipeline except preserveNullAndEmptyArrays=true on the unwind.
        final String leftJoin =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\""
                                + "LEFT JOIN \"%s\".\"%s\""
                                + "ON \"%s\".\"%s\" = \"%s\".\"%s\"",
                        getDatabaseName(),
                        COLLECTION_NAME + "_array",
                        getDatabaseName(),
                        OTHER_COLLECTION_NAME + "_otherArray",
                        COLLECTION_NAME + "_array",
                        COLLECTION_NAME + "__id",
                        OTHER_COLLECTION_NAME + "_otherArray",
                        OTHER_COLLECTION_NAME + "__id");
        result = queryMapper.get(leftJoin);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(9, result.getColumnMetaData().size());
        Assertions.assertEquals(5, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{ \"$unwind\": {"
                                + "\"path\": \"$array\", "
                                + "\"includeArrayIndex\" : \"array_index_lvl_0\", "
                                + "\"preserveNullAndEmptyArrays\": true }}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$match\": {\"$or\": "
                                + "[{\"array.field\": {\"$exists\": true}}, "
                                + "{\"array.field1\": {\"$exists\": true}}, "
                                + "{\"array.field2\": {\"$exists\": true}}] }}"),
                result.getAggregateOperations().get(1));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$lookup\": {"
                                + "\"from\": \"otherTestCollection\", "
                                + "\"let\": {\"field1\": \"$array.field1\", \"field\": \"$array.field\", \"array_index_lvl_0\": \"$array_index_lvl_0\", \"field2\": \"$array.field2\", \"testCollection__id\": \"$_id\"}, "
                                + "\"pipeline\": ["
                                + "{\"$unwind\": {\"path\": \"$otherArray\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"otherArray_index_lvl_0\"}}, "
                                + "{\"$match\": {\"$or\": [{\"otherArray.field\": {\"$exists\": true}}, {\"otherArray.field3\": {\"$exists\": true}}]}}, "
                                + "{\"$match\": {\"$expr\": {\"$eq\": [\"$$testCollection__id\", \"$_id\"]}}}], "
                                + "\"as\": \"otherTestCollection_otherArray\"}}"),
                result.getAggregateOperations().get(2));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$unwind\": {\"path\": \"$otherTestCollection_otherArray\", \"preserveNullAndEmptyArrays\": true}}"),
                result.getAggregateOperations().get(3));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {"
                                + "\"testCollection__id\": \"$_id\", "
                                + "\"array_index_lvl_0\": \"$array_index_lvl_0\", "
                                + "\"field\": \"$array.field\", "
                                + "\"field1\": \"$array.field1\", "
                                + "\"field2\": \"$array.field2\", "
                                + "\"otherTestCollection__id\": \"$otherTestCollection_otherArray._id\", "
                                + "\"otherArray_index_lvl_0\": \"$otherTestCollection_otherArray.otherArray_index_lvl_0\", "
                                + "\"field0\": \"$otherTestCollection_otherArray.otherArray.field\", "
                                + "\"field3\": \"$otherTestCollection_otherArray.otherArray.field3\", "
                                + "\"_id\": 0}}"),
                result.getAggregateOperations().get(4));
    }

    @Test
    @DisplayName("Tests that a statement with project, where, group by, having, order, and limit "
            + "works for tables from different collections.")
    void testComplexQueryWithDifferentCollectionJoin() throws SQLException {
        final String complexQuery =
                String.format(
                        "SELECT \"%s\" AS \"renamed\", COUNT(*) AS \"Total\" FROM \"%s\".\"%s\""
                                + "INNER JOIN \"%s\".\"%s\""
                                + "ON \"%s\".\"%s\" = \"%s\".\"%s\""
                                + "WHERE \"%s\" > 1 GROUP BY \"%s\".\"%s\", \"%s\", \"%s\""
                                + "HAVING COUNT(*) > 1 ORDER BY \"renamed\" LIMIT 1",
                        "field",
                        getDatabaseName(),
                        OTHER_COLLECTION_NAME,
                        getDatabaseName(),
                        COLLECTION_NAME + "_array",
                        OTHER_COLLECTION_NAME,
                        OTHER_COLLECTION_NAME + "__id",
                        COLLECTION_NAME + "_array",
                        COLLECTION_NAME + "__id",
                        "field",
                        OTHER_COLLECTION_NAME,
                        OTHER_COLLECTION_NAME + "__id",
                        "field",
                        "field1");
        final DocumentDbMqlQueryContext result = queryMapper.get(complexQuery);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(OTHER_COLLECTION_NAME, result.getCollectionName());
        Assertions.assertEquals(2, result.getColumnMetaData().size());
        Assertions.assertEquals(9, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$lookup\": {"
                                + "\"from\": \"testCollection\", "
                                + "\"let\": {\"otherTestCollection__id\": \"$_id\"}, "
                                + "\"pipeline\": ["
                                + "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}, "
                                + "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}, "
                                + "{\"$match\": {\"$expr\": {\"$eq\": [\"$$otherTestCollection__id\", \"$_id\"]}}}], \"as\": \"testCollection_array\"}}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$unwind\": {\"path\": \"$testCollection_array\", \"preserveNullAndEmptyArrays\": false}}"),
                result.getAggregateOperations().get(1));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$match\": {\"testCollection_array.array.field\": {\"$gt\": 1}}}"),
result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": {\"_id\": {\"otherTestCollection__id\": \"$_id\", \"field\": \"$testCollection_array.array.field\", \"field1\": \"$testCollection_array.array.field1\"}, \"Total\": {\"$sum\": 1}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"_id\": 0, \"otherTestCollection__id\": \"$_id.otherTestCollection__id\", \"field\": \"$_id.field\", \"field1\": \"$_id.field1\", \"Total\": \"$Total\"}}"), result.getAggregateOperations().get(4)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"Total\": {\"$gt\": 1}}}"), result.getAggregateOperations().get(5)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"renamed\": \"$field\", \"Total\": \"$Total\", \"_id\": 0}}"), result.getAggregateOperations().get(6)); Assertions.assertEquals( BsonDocument.parse("{\"$sort\": {\"renamed\": 1}}"), result.getAggregateOperations().get(7)); Assertions.assertEquals( BsonDocument.parse("{\"$limit\": {\"$numberLong\": \"1\"}}"), result.getAggregateOperations().get(8)); } @Test @DisplayName("Tests that unsupported join conditions or types throw an exception.") void testUnsupportedJoins() { // Cannot do right join on tables from different collections. final String rightJoinQuery = String.format( "SELECT * FROM \"%s\".\"%s\"" + " RIGHT JOIN \"%s\".\"%s\"" + " ON \"%s\" = \"%s\"", getDatabaseName(), COLLECTION_NAME + "_array", getDatabaseName(), OTHER_COLLECTION_NAME + "_otherArray", COLLECTION_NAME + "__id", OTHER_COLLECTION_NAME + "__id"); String message = Assertions .assertThrows(SQLException.class, () -> queryMapper.get(rightJoinQuery)) .getMessage(); Assertions.assertTrue(message.contains("Unable to parse SQL")); Assertions.assertTrue(message.contains(SqlError.lookup(SqlError.UNSUPPORTED_JOIN_TYPE, "RIGHT"))); // Cannot do a full outer join on tables from different collections. 
final String fullJoinQuery = String.format( "SELECT * FROM \"%s\".\"%s\"" + "FULL JOIN \"%s\".\"%s\"" + "ON \"%s\" = \"%s\"", getDatabaseName(), COLLECTION_NAME + "_array", getDatabaseName(), OTHER_COLLECTION_NAME + "_otherArray", COLLECTION_NAME + "__id", OTHER_COLLECTION_NAME + "__id"); message = Assertions.assertThrows(SQLException.class, () -> queryMapper.get(fullJoinQuery)) .getMessage(); Assertions.assertTrue(message.contains("Unable to parse SQL")); Assertions.assertTrue(message.contains(SqlError.lookup(SqlError.UNSUPPORTED_JOIN_TYPE, "FULL"))); // Can only have a single equi-condition for a join between tables from same collection. final String multipleConditionsQuery = String.format( "SELECT * FROM \"%s\".\"%s\"" + "INNER JOIN \"%s\".\"%s\"" + "ON \"%s\" = \"%s\"" + "OR \"%s\" > \"%s\"", getDatabaseName(), COLLECTION_NAME + "_array", getDatabaseName(), OTHER_COLLECTION_NAME + "_otherArray", COLLECTION_NAME + "__id", OTHER_COLLECTION_NAME + "__id", COLLECTION_NAME + "__id", OTHER_COLLECTION_NAME + "__id"); message = Assertions.assertThrows(SQLException.class, () -> queryMapper.get(multipleConditionsQuery)) .getMessage(); Assertions.assertTrue(message.contains("Unable to parse SQL")); Assertions.assertTrue(message.contains(SqlError.lookup(SqlError.SINGLE_EQUIJOIN_ONLY))); // Can only join tables from same collection on foreign keys. final String nonForeignKeyQuery = String.format( "SELECT * FROM \"%s\".\"%s\"" + "INNER JOIN \"%s\".\"%s\"" + "ON \"%s\".\"%s\" = \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME, getDatabaseName(), COLLECTION_NAME + "_array", COLLECTION_NAME, COLLECTION_NAME + "__id", COLLECTION_NAME + "_array", "field"); message = Assertions.assertThrows(SQLException.class, () -> queryMapper.get(nonForeignKeyQuery)) .getMessage(); Assertions.assertTrue(message.contains("Unable to parse SQL")); Assertions.assertTrue(message.contains(SqlError.lookup(SqlError.EQUIJOINS_ON_FK_ONLY))); // Can only join tables from same collection on foreign keys. 
        final String nonEqualityQuery =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\""
                                + "INNER JOIN \"%s\".\"%s\""
                                + "ON \"%s\".\"%s\" > \"%s\".\"%s\"",
                        getDatabaseName(),
                        COLLECTION_NAME,
                        getDatabaseName(),
                        COLLECTION_NAME + "_array",
                        COLLECTION_NAME,
                        COLLECTION_NAME + "__id",
                        COLLECTION_NAME + "_array",
                        "field");
        message = Assertions.assertThrows(SQLException.class, () -> queryMapper.get(nonEqualityQuery))
                .getMessage();
        Assertions.assertTrue(message.contains("Unable to parse SQL"));
        Assertions.assertTrue(message.contains(SqlError.lookup(SqlError.EQUIJOINS_ON_FK_ONLY)));
        // Joining two child array tables directly omits the required array-index key.
        final String innerJoin =
                String.format(
                        "SELECT field1 FROM \"%s\".\"%s\""
                                + "INNER JOIN \"%s\".\"%s\""
                                + "ON \"%s\".\"%s\" = \"%s\".\"%s\"",
                        getDatabaseName(),
                        NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array",
                        getDatabaseName(),
                        NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array_array2",
                        NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array",
                        NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "__id",
                        NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "_array_array2",
                        NESTED_DOCUMENT_IN_NESTED_ARRAY_COLLECTION_NAME + "__id");
        message = Assertions.assertThrows(SQLException.class, () -> queryMapper.get(innerJoin))
                .getMessage();
        Assertions.assertTrue(message.contains(SqlError.lookup(SqlError.JOIN_MISSING_PRIMARY_KEYS,"[array_index_lvl_0]")));
    }

    @Test
    @DisplayName("Tests SUM(1), and that field names generated by Calcite have $ symbols removed.")
    void testQueryWithSumOne() throws SQLException {
        final String query =
                String.format(
                        "SELECT SUM(1) FROM \"%s\".\"%s\"",
                        getDatabaseName(), COLLECTION_NAME);
        final DocumentDbMqlQueryContext result = queryMapper.get(query);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName());
        Assertions.assertEquals(1, result.getColumnMetaData().size());
        Assertions.assertEquals(3, result.getAggregateOperations().size());
        // Calcite's generated "$f0" is expected to be sanitized to "_f0" in the pipeline.
        Assertions.assertEquals(
                BsonDocument.parse("{\"$project\": {\"_f0\": {\"$literal\": 1}, \"_id\": 0}}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$group\": {\"_id\": {}, \"EXPR$0\": {\"$push\": \"$_f0\"}}}"),
                result.getAggregateOperations().get(1));
        // SUM over an all-null input must yield null, hence the $filter/$size guard.
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {\"_id\": 0, "
                                + "\"EXPR$0\": {\"$cond\": [{\"$gt\": [{\"$size\": {\"$filter\": {\"input\": \"$EXPR$0\", "
                                + "\"cond\": {\"$gt\": [\"$$this\", null]}}}}, 0]}, {\"$sum\": \"$EXPR$0\"}, null]}}}"),
                result.getAggregateOperations().get(2));
    }

    @Test
    @DisplayName("Tests CASE with one field, and three sections.")
    void testQueryWithCASE() throws SQLException {
        final String query =
                String.format(
                        "SELECT CASE "
                                + "WHEN \"field\" > 10 THEN 'A' "
                                + "WHEN \"field\" > 5 THEN 'B' "
                                + "ELSE 'C' END FROM \"%s\".\"%s\"",
                        getDatabaseName(), COLLECTION_NAME + "_array");
        final DocumentDbMqlQueryContext result = queryMapper.get(query);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName());
        Assertions.assertEquals(1, result.getColumnMetaData().size());
        Assertions.assertEquals(3, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{ \"$unwind\": {"
                                + "\"path\": \"$array\", "
                                + "\"includeArrayIndex\" : \"array_index_lvl_0\", "
                                + "\"preserveNullAndEmptyArrays\": true }}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$match\": {\"$or\": ["
                        + "{\"array.field\": {\"$exists\": true}}, "
                        + "{\"array.field1\": {\"$exists\": true}}, "
                        + "{\"array.field2\": {\"$exists\": true}}]}}"),
                result.getAggregateOperations().get(1));
        // Each WHEN becomes a nested $cond with three-valued (null-propagating) comparison.
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{\"$project\": {\"EXPR$0\": {\"$cond\": [{\"$cond\": [{\"$and\": [{\"$gt\": [\"$array.field\", null]}, "
                                + "{\"$gt\": [{\"$literal\": 10}, null]}]}, "
                                + "{\"$gt\": [\"$array.field\", {\"$literal\": 10}]}, null]}, {\"$literal\": \"A\"}, "
                                + "{\"$cond\": [{\"$cond\": [{\"$and\": [{\"$gt\": [\"$array.field\", null]}, {\"$gt\": [{\"$literal\": 5}, null]}]}, "
                                + "{\"$gt\": [\"$array.field\", {\"$literal\": 5}]}, null]}, {\"$literal\": \"B\"}, {\"$literal\": \"C\"}]}]}, "
                                + "\"_id\": 0}}"),
                result.getAggregateOperations().get(2));
    }

    @Test
    @DisplayName("Tests a query with a where clause comparing two literals.")
    void testWhereTwoLiterals() throws SQLException {
        // A constant-true WHERE (2 > 1) is folded away; only the projection remains.
        final String query =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\" WHERE 2 > 1",
                        getDatabaseName(), COLLECTION_NAME);
        final DocumentDbMqlQueryContext result = queryMapper.get(query);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName());
        Assertions.assertEquals(1, result.getColumnMetaData().size());
        Assertions.assertEquals(1, result.getAggregateOperations().size());
        Assertions.assertEquals(BsonDocument.parse(
                "{\"$project\": {\"testCollection__id\": \"$_id\", \"_id\": 0}}"),
                result.getAggregateOperations().get(0));
    }

    @Test
    @DisplayName("Tests queries with SUBSTRING")
    void testQuerySubstring() throws SQLException {
        // SQL SUBSTRING is 1-based; MQL $substrCP is 0-based, hence the $subtract by 1.
        final String query =
                String.format(
                        "SELECT SUBSTRING(\"field\", 4, 2) FROM \"%s\".\"%s\" WHERE SUBSTRING(\"field\", 2, 3) = 'abc'"
                        , getDatabaseName(), COLLECTION_NAME + "_array");
        final DocumentDbMqlQueryContext result = queryMapper.get(query);
        Assertions.assertNotNull(result);
        Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName());
        Assertions.assertEquals(1, result.getColumnMetaData().size());
        Assertions.assertEquals(6, result.getAggregateOperations().size());
        Assertions.assertEquals(
                BsonDocument.parse(
                        "{ \"$unwind\": {"
                                + "\"path\": \"$array\", "
                                + "\"includeArrayIndex\" : \"array_index_lvl_0\", "
                                + "\"preserveNullAndEmptyArrays\": true }}"),
                result.getAggregateOperations().get(0));
        Assertions.assertEquals(
                BsonDocument.parse("{\"$match\": {\"$or\": ["
                        + "{\"array.field\": {\"$exists\": true}}, "
                        + "{\"array.field1\": {\"$exists\": true}}, "
                        + "{\"array.field2\": {\"$exists\": true}}]}}"),
                result.getAggregateOperations().get(1));
        Assertions.assertEquals(
                BsonDocument.parse(
"{\"$project\": {" + "\"_id\": 1, " + "\"array_index_lvl_0\": 1, " + "\"array.field\": 1, " + "\"array.field1\": 1, " + "\"array.field2\": 1, " + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$substrCP\": [\"$array.field\", {\"$subtract\": [{\"$literal\": 2}, 1]}, {\"$literal\": 3}]}, null]}, " + "{\"$gt\": [{\"$literal\": \"abc\"}, null]}]}, {\"$eq\": [{\"$substrCP\": [\"$array.field\", {\"$subtract\": [{\"$literal\": 2}, 1]}, {\"$literal\": 3}]}, " + "{\"$literal\": \"abc\"}]}, null]}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), result.getAggregateOperations().get(4)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$substrCP\": [\"$array.field\", {\"$subtract\": [{\"$literal\": 4}, 1]}, {\"$literal\": 2}]}, \"_id\": 0}}"), result.getAggregateOperations().get(5)); } @Test @DisplayName("Tests queries with substring containing expressions works.") void testQuerySubstringExpr() throws SQLException { final String query = String.format( "SELECT SUBSTRING(\"field\", \"field2\", \"field1\" - \"field2\") " + "FROM \"%s\".\"%s\" WHERE SUBSTRING(\"field\", \"field2\", \"field1\" + \"field2\") = 'abcd'", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(6, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), 
result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + "\"array_index_lvl_0\": 1, \"" + "array.field\": 1, " + "\"array.field1\": 1, " + "\"array.field2\": 1, " + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$substrCP\": [\"$array.field\", {\"$subtract\": [\"$array.field2\", 1]}, " + "{\"$add\": [\"$array.field1\", \"$array.field2\"]}]}, null]}, " + "{\"$gt\": [{\"$literal\": \"abcd\"}, null]}]}, {\"$eq\": [{\"$substrCP\": [\"$array.field\", {\"$subtract\": [\"$array.field2\", 1]}, " + "{\"$add\": [\"$array.field1\", \"$array.field2\"]}]}, {\"$literal\": \"abcd\"}]}, null]}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), result.getAggregateOperations().get(4)); Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {" + "\"EXPR$0\": {\"$substrCP\": [\"$array.field\", {\"$subtract\": [\"$array.field2\", 1]}, {\"$subtract\": [\"$array.field1\", \"$array.field2\"]}]}, \"_id\": 0}}"), result.getAggregateOperations().get(5)); } @Test @DisplayName("Tests that unquoted identifiers retain their casing but are evaluated case-sensitively.") void testQueryWithUnquotedIdentifiers() throws SQLException { final String correctCasing = String.format("SELECT * FROM %s.%s", getDatabaseName(), COLLECTION_NAME); final DocumentDbMqlQueryContext result = queryMapper.get(correctCasing); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, 
result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(1, result.getAggregateOperations().size()); final String incorrectCasing = String.format("SELECT * FROM %s.%s", getDatabaseName(), COLLECTION_NAME.toUpperCase()); Assertions.assertEquals(String.format( "Unable to parse SQL 'SELECT * FROM database.TESTCOLLECTION'. --" + " Reason: 'From line 1, column 15 to line 1, column 37:" + " Object 'TESTCOLLECTION' not found within 'database'; did you mean 'testCollection'?'"), Assertions.assertThrows(SQLException.class, () -> queryMapper.get(incorrectCasing)) .getMessage()); } @Test @DisplayName("Tests queries with where clause containing nested AND.") void testQueryAndWithTypes() throws SQLException { final String query = String.format( "SELECT \"field\" > '2021-01-01' AND \"field\" < '2020-02-01' FROM \"%s\".\"%s\"", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(DATE_COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(1, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"EXPR$0\": {\"$cond\": [{\"$and\": [{\"$eq\": [true, null]}, " + "{\"$eq\": [true, {\"$lte\": [\"$field\", null]}]}]}, " + "true, " + "{\"$cond\": [" + "{\"$or\": [" + "{\"$eq\": [false, null]}, " + "{\"$eq\": [false, {\"$lte\": [\"$field\", null]}]}]}, " + "false, null]}]}, " + "\"_id\": 0}}"), result.getAggregateOperations().get(0)); } @Test @DisplayName("Tests querying when select list exceeds max field limit for $project.") void testLargeSelectList() throws SQLException { final String query = String.format( "SELECT \"%1$s\" AS \"1\", \"%1$s\" AS \"2\", \"%1$s\" AS \"3\", \"%1$s\" AS \"4\", \"%1$s\" AS \"5\", " + "\"%1$s\" AS \"6\", \"%1$s\" AS \"7\", \"%1$s\" AS \"8\", \"%1$s\" AS \"9\", 
\"%1$s\" AS \"10\"," + "\"%1$s\" AS \"11\", \"%1$s\" AS \"12\", \"%1$s\" AS \"13\", \"%1$s\" AS \"14\", \"%1$s\" AS \"15\", " + "\"%1$s\" AS \"16\", \"%1$s\" AS \"17\", \"%1$s\" AS \"18\", \"%1$s\" AS \"19\", \"%1$s\" AS \"20\", " + "\"%1$s\" AS \"21\", \"%1$s\" AS \"22\", \"%1$s\" AS \"23\", \"%1$s\" AS \"24\", \"%1$s\" AS \"25\", " + "\"%1$s\" AS \"26\", \"%1$s\" AS \"27\", \"%1$s\" AS \"28\", \"%1$s\" AS \"29\", \"%1$s\" AS \"30\"," + "\"%1$s\" AS \"31\", \"%1$s\" AS \"32\", \"%1$s\" AS \"33\", \"%1$s\" AS \"34\", \"%1$s\" AS \"35\", " + "\"%1$s\" AS \"36\", \"%1$s\" AS \"37\", \"%1$s\" AS \"38\", \"%1$s\" AS \"39\", \"%1$s\" AS \"40\", " + "\"%1$s\" AS \"41\", \"%1$s\" AS \"42\", \"%1$s\" AS \"43\", \"%1$s\" AS \"44\", \"%1$s\" AS \"45\", " + "\"%1$s\" AS \"46\", \"%1$s\" AS \"47\", \"%1$s\" AS \"48\", \"%1$s\" AS \"49\", \"%1$s\" AS \"50\"," + "\"%1$s\" AS \"51\" FROM \"%2$s\".\"%3$s\"", "field", getDatabaseName(), DATE_COLLECTION_NAME); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertEquals(DATE_COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(51, result.getColumnMetaData().size()); Assertions.assertEquals(0, result.getAggregateOperations().size()); } @Test @DisplayName("Tests that $addFields operation is added before $unwind except when $unwind is needed for the $addFields.") void testJoinOpOrder() throws SQLException { String query = String.format( "SELECT * FROM \"%1$s\".\"%2$s\"" + " LEFT JOIN \"%1$s\".\"%3$s\" ON \"%2$s\".\"%2$s__id\" = \"%3$s\".\"%2$s__id\"", getDatabaseName(), COLLECTION_NAME, COLLECTION_NAME + "_array"); DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(6, result.getColumnMetaData().size()); Assertions.assertEquals(3, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$addFields\": {\"testCollection__id0\": 
{\"$cond\": [{\"$or\": [" + "{\"$ifNull\": [\"$array.field\", false]}, " + "{\"$ifNull\": [\"$array.field1\", false]}, {\"$ifNull\": [\"$array.field2\", false]}]}, \"$_id\", null]}, " + "\"_id\": \"$_id\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"testCollection__id\": \"$_id\", " + "\"testCollection__id0\": \"$testCollection__id0\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(2)); query = String.format( "SELECT * FROM \"%1$s\".\"%2$s\" " + "LEFT OUTER JOIN \"%1$s\".\"%3$s\" " + "ON \"%2$s\".\"%4$s\" = \"%3$s\".\"%4$s\" " + "AND \"%2$s\".\"%5$s\" = \"%3$s\".\"%5$s\"", getDatabaseName(), COLLECTION_NAME + "_array", COLLECTION_NAME + "_array_field3", COLLECTION_NAME + "__id", "array_index_lvl_0"); result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(8, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$addFields\": {" + "\"testCollection__id0\": {\"$cond\": [{\"$ifNull\": [\"$array.field3.field4\", false]}, \"$_id\", null]}, " + "\"_id\": {\"$cond\": [{\"$or\": [{\"$ifNull\": [\"$array.field\", false]}, {\"$ifNull\": [\"$array.field1\", false]}, {\"$ifNull\": [\"$array.field2\", false]}]}, \"$_id\", null]}, " + 
"\"array_index_lvl_00\": {\"$cond\": [{\"$ifNull\": [\"$array.field3.field4\", false]}, \"$array_index_lvl_0\", null]}, " + "\"array_index_lvl_0\": {\"$cond\": [{\"$or\": [{\"$ifNull\": [\"$array.field\", false]}, {\"$ifNull\": [\"$array.field1\", false]}, {\"$ifNull\": [\"$array.field2\", false]}]}, \"$array_index_lvl_0\", null]}}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"testCollection__id0\": \"$testCollection__id0\", " + "\"array_index_lvl_00\": \"$array_index_lvl_00\", " + "\"field4\": \"$array.field3.field4\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests that fields can be selected with the column name '_id' ") void testIdAsColumnName() throws SQLException { // Get a base table with a rename. final String basicQuery = String.format("SELECT \"testCollection__id\" AS \"_id\" FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME); DocumentDbMqlQueryContext result = queryMapper.get(basicQuery); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(1, result.getAggregateOperations().size()); // Make sure there is no $_id: 0 here. Assertions.assertEquals(BsonDocument.parse("{\"$project\": {\"_id\": '$_id'} }"), result.getAggregateOperations().get(0)); // Get a table with nested _id. 
final String nestedTableQuery = String.format("SELECT * FROM \"%s\".\"%s\"", getDatabaseName(), NESTED_ID_COLLECTION_NAME + "_document"); result = queryMapper.get(nestedTableQuery); Assertions.assertNotNull(result); Assertions.assertEquals(NESTED_ID_COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(3, result.getColumnMetaData().size()); Assertions.assertEquals(2, result.getAggregateOperations().size()); // Make sure there is no $_id: 0 here. Assertions.assertEquals(BsonDocument.parse( "{\"$project\": {\"nestedIdCollection__id\": \"$_id\", \"_id\": \"$document._id\", \"field1\": \"$document.field1\"}}"), result.getAggregateOperations().get(1)); } @Test @DisplayName("Tests that calls to SUM where empty should return 0 will only use $sum") void testAggregateWithSumZero() throws SQLException { final String queryWithSum = String.format( "SELECT SUM(DISTINCT \"%s\") / COUNT(DISTINCT \"%s\") FROM \"%s\".\"%s\"", "field", "field", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(queryWithSum); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(1, result.getColumnMetaData().size()); Assertions.assertEquals(5, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{ \"$unwind\": {" + "\"path\": \"$array\", " + "\"includeArrayIndex\" : \"array_index_lvl_0\", " + "\"preserveNullAndEmptyArrays\": true }}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse("{\"$match\": {\"$or\": [" + "{\"array.field\": {\"$exists\": true}}, " + "{\"array.field1\": {\"$exists\": true}}, " + "{\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$group\": {" + "\"_id\": {}, " + "\"_f0\": {\"$addToSet\": \"$array.field\"}, " + "\"_f1\": {\"$addToSet\": \"$array.field\"}}}"), 
result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"_id\": 0, \"_f0\": {\"$sum\": \"$_f0\"}, \"_f1\": {\"$size\": {\"$filter\": {\"input\": \"$_f1\", \"cond\": {\"$gt\": [\"$$this\", null]}}}}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {\"EXPR$0\": {\"$divide\": [{\"$cond\": [{\"$cond\": [{\"$and\": [{\"$gt\": [\"$_f1\", null]}, {\"$gt\": [{\"$literal\": 0}, null]}]}, {\"$eq\": [\"$_f1\", {\"$literal\": 0}]}, null]}, null, \"$_f0\"]}, \"$_f1\"]}, \"_id\": 0}}"), result.getAggregateOperations().get(4)); } }
4,538
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query/DocumentDbQueryMappingServiceFilterTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.query; import org.bson.BsonDocument; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbFilter; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension; import java.sql.SQLException; @ExtendWith(DocumentDbFlapDoodleExtension.class) public class DocumentDbQueryMappingServiceFilterTest extends DocumentDbQueryMappingServiceTest { private static final String COLLECTION_NAME = "testCollection"; private static DocumentDbQueryMappingService queryMapper; @BeforeAll void initialize() throws SQLException { final BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", " + "\"document\": { \"booleanField\": true, \"booleanField2\": false} " + "\"array\" : [ { \"field\" : 1, \"field1\": \"value\" }, { \"field\" : 2, \"field2\" : \"value\" } ]}"); insertBsonDocuments(COLLECTION_NAME, new BsonDocument[]{document}); queryMapper = getQueryMappingService(); } @Test @DisplayName("Test queries with WHERE f1 IN (c1, c2...)") void testQueryWithIn() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" WHERE \"field\" IN (2, 3)" , getDatabaseName(), COLLECTION_NAME + "_array"); final 
DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$eq\": 2}}, {\"array.field\": {\"$eq\": 3}}]}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Test queries with WHERE f1 NOT IN (c1, c2...)") void testQueryWithNotIn() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" WHERE \"field\" NOT IN (2, 3)" , getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), 
result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$and\": [{\"array.field\": {\"$nin\": [null, 2]}}, {\"array.field\": {\"$nin\": [null, 3]}}]}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests queries with IS [NOT] NULL") void testQueryIsNull() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" WHERE \"field\" IS NULL OR \"field1\" IS NOT NULL" , getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$eq\": null }},{\"array.field1\": {\"$ne\": null }}]}}"), 
result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests queries with where clause containing arithmetic.") void testQueryArithmeticWhere() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE \"field\" * \"field1\" / \"field2\" + \"field1\" - \"field2\" = 7", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(6, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + "\"array_index_lvl_0\": 1, " + "\"array.field\": 1, " + "\"array.field1\": 1, " + "\"array.field2\": 1, " + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$subtract\": [{\"$add\": [{\"$divide\": [{\"$multiply\": [\"$array.field\", \"$array.field1\"]}, \"$array.field2\"]}, \"$array.field1\"]}, \"$array.field2\"]}, null]}, " + "{\"$gt\": [{\"$literal\": 7}, null]}]}, {\"$eq\": [{\"$subtract\": [{\"$add\": [{\"$divide\": [{\"$multiply\": 
[\"$array.field\", \"$array.field1\"]}, \"$array.field2\"]}, \"$array.field1\"]}, \"$array.field2\"]}, " + "{\"$literal\": 7}]}, null]}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), result.getAggregateOperations().get(4)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(5)); } @Test @DisplayName("Tests queries with where clause containing modulo.") void testQueryModuloWhere() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE MOD(\"field\", 3) = 2" + "OR MOD(8, \"field\") = 2" + "OR MOD(3, 2) = \"field\"", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(6, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + 
"\"array_index_lvl_0\": 1, " + "\"array.field\": 1, " + "\"array.field1\": 1, " + "\"array.field2\": 1, " + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$or\": [{\"$eq\": [true, {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$mod\": [\"$array.field\", {\"$literal\": 3}]}, null]}, " + "{\"$gt\": [{\"$literal\": 2}, null]}]}, {\"$eq\": [{\"$mod\": [\"$array.field\", {\"$literal\": 3}]}, {\"$literal\": 2}]}, null]}]}, " + "{\"$eq\": [true, {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$mod\": [{\"$literal\": 8}, \"$array.field\"]}, null]}, {\"$gt\": [{\"$literal\": 2}, null]}]}, " + "{\"$eq\": [{\"$mod\": [{\"$literal\": 8}, \"$array.field\"]}, {\"$literal\": 2}]}, null]}]}, {\"$eq\": [true, {\"$cond\": [{\"$and\": [" + "{\"$gt\": [{\"$literal\": 1}, null]}, {\"$gt\": [\"$array.field\", null]}]}, " + "{\"$eq\": [{\"$literal\": 1}, \"$array.field\"]}, null]}]}]}, true, {\"$cond\": [{\"$and\": [{\"$eq\": [false, {\"$cond\": [{\"$and\": [" + "{\"$gt\": [{\"$mod\": [\"$array.field\", {\"$literal\": 3}]}, null]}, " + "{\"$gt\": [{\"$literal\": 2}, null]}]}, {\"$eq\": [{\"$mod\": [\"$array.field\", {\"$literal\": 3}]}, {\"$literal\": 2}]}, null]}]}, " + "{\"$eq\": [false, {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$mod\": [{\"$literal\": 8}, \"$array.field\"]}, null]}, " + "{\"$gt\": [{\"$literal\": 2}, null]}]}, " + "{\"$eq\": [{\"$mod\": [{\"$literal\": 8}, \"$array.field\"]}, {\"$literal\": 2}]}, null]}]}, {\"$eq\": [false, {\"$cond\": [{\"$and\": [" + "{\"$gt\": [{\"$literal\": 1}, null]}, {\"$gt\": [\"$array.field\", null]}]}, " + "{\"$eq\": [{\"$literal\": 1}, \"$array.field\"]}, null]}]}]}, false, null]}]}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), result.getAggregateOperations().get(4)); 
Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(5)); } @Test @DisplayName("Tests queries with where clause containing nested OR.") void testQueryWhereNestedOr() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE \"field\" > 0 OR (\"field1\" > 0 OR \"field2\" > 6)", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$gt\": 0}}, {\"array.field1\": {\"$gt\": 0}}, {\"array.field2\": {\"$gt\": 6}}]}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests queries with where clause containing nested AND.") 
void testQueryWhereNestedAnd() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE \"field\" > 0 AND (\"field1\" > 0 AND \"field2\" > 6)", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$and\": [{\"array.field\": {\"$gt\": 0}}, {\"array.field1\": {\"$gt\": 0}}, {\"array.field2\": {\"$gt\": 6}}]}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests queries with where clause containing nested combined NOT, OR, and AND.") void testQueryWhereNotAndOr() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE ((NOT \"field\" > 0 AND \"field2\" < 10) AND (NOT \"field1\" > 0 OR \"field2\" > 6)) OR \"field2\" > 0", getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); 
Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(4, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [" + "{\"$and\": [{\"array.field\": {\"$lte\": 0}}, {\"array.field2\": {\"$lt\": 10}}, " + "{\"$or\": [{\"array.field1\": {\"$lte\": 0}}, {\"array.field2\": {\"$gt\": 6}}]}]}, " + "{\"array.field2\": {\"$gt\": 0}}]}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(3)); } @Test @DisplayName("Tests queries with where clause comparing two columns.") void testQueryWhereTwoColumns() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE \"field\" = \"field2\"" , getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(6, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": 
\"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + "\"array_index_lvl_0\": 1, " + "\"array.field\": 1, " + "\"array.field1\": 1, " + "\"array.field2\": 1, " + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$and\": [{\"$gt\": [\"$array.field\", null]}, " + "{\"$gt\": [\"$array.field2\", null]}]}, " + "{\"$eq\": [\"$array.field\", \"$array.field2\"]}, null]}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), result.getAggregateOperations().get(4)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(5)); } @Test @DisplayName("Tests that queries comparing the result of simple comparisons use the aggregate operator syntax.") void testQueryWhereNestedCompare() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE (\"field\" IS NULL) = (\"field2\" IS NULL)" , getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); 
Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(6, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": {\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + "\"array_index_lvl_0\": 1, " + "\"array.field\": 1, " + "\"array.field1\": 1, " + "\"array.field2\": 1, " + "\"placeholderField1F84EB1G3K47\"" + ": {\"$cond\": [{\"$and\": [{\"$gt\": [{\"$lte\": [\"$array.field\", null]}, null]}, " + "{\"$gt\": [{\"$lte\": [\"$array.field2\", null]}, null]}]}, " + "{\"$eq\": [{\"$lte\": [\"$array.field\", null]}, {\"$lte\": [\"$array.field2\", null]}]}, null]}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), result.getAggregateOperations().get(4)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(5)); } @Test @DisplayName("Tests queries with where clause using value of boolean columns.") void testQueryWhereBooleanFields() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE \"booleanField\" AND NOT 
\"booleanField2\"" , getDatabaseName(), COLLECTION_NAME + "_document"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(3, result.getColumnMetaData().size()); Assertions.assertEquals(3, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"document.booleanField\": {\"$exists\": true}}, {\"document.booleanField2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$and\": [{\"document.booleanField\": true}, {\"document.booleanField2\": false}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"booleanField\": \"$document.booleanField\", " + "\"booleanField2\": \"$document.booleanField2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(2)); } @Test @DisplayName("Tests queries with where clause combining literal comparison and field vs field comparison.") void testQueryWhereFieldAndSimpleComparison() throws SQLException { final String query = String.format( "SELECT * FROM \"%s\".\"%s\" " + "WHERE \"field\" < 2 OR \"field\" = \"field2\"" , getDatabaseName(), COLLECTION_NAME + "_array"); final DocumentDbMqlQueryContext result = queryMapper.get(query); Assertions.assertNotNull(result); Assertions.assertEquals(COLLECTION_NAME, result.getCollectionName()); Assertions.assertEquals(5, result.getColumnMetaData().size()); Assertions.assertEquals(6, result.getAggregateOperations().size()); Assertions.assertEquals( BsonDocument.parse( "{\"$unwind\": {\"path\": \"$array\", \"preserveNullAndEmptyArrays\": true, \"includeArrayIndex\": \"array_index_lvl_0\"}}"), result.getAggregateOperations().get(0)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {\"$or\": [{\"array.field\": 
{\"$exists\": true}}, {\"array.field1\": {\"$exists\": true}}, {\"array.field2\": {\"$exists\": true}}]}}"), result.getAggregateOperations().get(1)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"_id\": 1, " + "\"array_index_lvl_0\": 1, " + "\"array.field\": 1, " + "\"array.field1\": 1, " + "\"array.field2\": 1, " + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$cond\": [{\"$or\": [{\"$eq\": [true, {\"$cond\": [{\"$and\": [{\"$gt\": [\"$array.field\", null]}, {\"$gt\": [{\"$literal\": 2}, null]}]}, " + "{\"$lt\": [\"$array.field\", {\"$literal\": 2}]}, null]}]}, " + "{\"$eq\": [true, {\"$cond\": [{\"$and\": [{\"$gt\": [\"$array.field\", null]}, {\"$gt\": [\"$array.field2\", null]}]}, " + "{\"$eq\": [\"$array.field\", \"$array.field2\"]}, null]}]}]}, true, " + "{\"$cond\": [{\"$and\": [{\"$eq\": [false, {\"$cond\": [{\"$and\": [{\"$gt\": [\"$array.field\", null]}, {\"$gt\": [{\"$literal\": 2}, null]}]}, " + "{\"$lt\": [\"$array.field\", {\"$literal\": 2}]}, null]}]}, {\"$eq\": [false, {\"$cond\": [{\"$and\": [{\"$gt\": [\"$array.field\", null]}, {\"$gt\": [\"$array.field2\", null]}]}, " + "{\"$eq\": [\"$array.field\", \"$array.field2\"]}, null]}]}]}, false, null]}]}}}"), result.getAggregateOperations().get(2)); Assertions.assertEquals( BsonDocument.parse( "{\"$match\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}"), result.getAggregateOperations().get(3)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + DocumentDbFilter.BOOLEAN_FLAG_FIELD + ": 0}}"), result.getAggregateOperations().get(4)); Assertions.assertEquals( BsonDocument.parse( "{\"$project\": {" + "\"testCollection__id\": \"$_id\", " + "\"array_index_lvl_0\": \"$array_index_lvl_0\", " + "\"field\": \"$array.field\", " + "\"field1\": \"$array.field1\", " + "\"field2\": \"$array.field2\", " + "\"_id\": 0}}"), result.getAggregateOperations().get(5)); } }
4,539
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query/DocumentDbQueryMappingServiceTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.query; import com.mongodb.client.MongoClient; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.bson.BsonDocument; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleTest; import software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata; import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter; import java.sql.SQLException; import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_NEW; public class DocumentDbQueryMappingServiceTest extends DocumentDbFlapDoodleTest { private static final String DATABASE_NAME = "database"; private static final String USER = "user"; private static final String PASSWORD = "password"; private DocumentDbConnectionProperties connectionProperties; private MongoClient client; @BeforeAll @SuppressFBWarnings(value = "HARD_CODE_PASSWORD", justification = "Hardcoded for test purposes only") void setup() { connectionProperties = new DocumentDbConnectionProperties(); createUser(DATABASE_NAME, USER, PASSWORD); connectionProperties.setUser(USER); connectionProperties.setPassword(PASSWORD); connectionProperties.setDatabase(DATABASE_NAME); connectionProperties.setTlsEnabled("false"); 
connectionProperties.setHostname("localhost:" + getMongoPort()); client = createMongoClient(ADMIN_DATABASE, USER, PASSWORD); } @AfterAll void teardown() throws Exception { try (DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(connectionProperties, client)) { schemaWriter.remove("id"); } client.close(); } protected void insertBsonDocuments(final String collectionName, final BsonDocument[] documents) { insertBsonDocuments(collectionName, DATABASE_NAME, documents, client); } protected DocumentDbQueryMappingService getQueryMappingService() throws SQLException { final DocumentDbDatabaseSchemaMetadata databaseMetadata = DocumentDbDatabaseSchemaMetadata.get(connectionProperties, "id", VERSION_NEW, client); return new DocumentDbQueryMappingService(connectionProperties, databaseMetadata); } protected static String getDatabaseName() { return DATABASE_NAME; } }
4,540
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query/limitations/DocumentDbSqlInjectionTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.query.limitations; import org.bson.BsonArray; import org.bson.BsonDocument; import org.bson.BsonValue; import org.bson.conversions.Bson; import org.checkerframework.checker.nullness.qual.NonNull; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension; import software.amazon.documentdb.jdbc.query.DocumentDbMqlQueryContext; import software.amazon.documentdb.jdbc.query.DocumentDbQueryMappingService; import software.amazon.documentdb.jdbc.query.DocumentDbQueryMappingServiceTest; import java.sql.SQLException; import java.util.Collections; import java.util.List; import java.util.Map; import static software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbRules.quote; @ExtendWith(DocumentDbFlapDoodleExtension.class) class DocumentDbSqlInjectionTest extends DocumentDbQueryMappingServiceTest { private static final String COLLECTION_NAME = "testCollectionInjectionTest"; private static final String OTHER_COLLECTION_NAME = "otherTestCollectionInjectionTest"; private DocumentDbQueryMappingService queryMapper; @BeforeAll void beforeAll() throws SQLException { final BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ " + "{ \"field\" : 1, \"field1\": 
\"value\" }, " + "{ \"field\" : 2, \"field2\" : \"value\" , \"field3\" : { \"field4\": 3} } ]}"); final BsonDocument otherDocument = BsonDocument.parse("{\"_id\": \"key1\", \"otherArray\": [" + "{\"field\": 1, \"field3\": \"value\"}, " + "{\"field\": 2, \"field3\": \"value\"}]}"); insertBsonDocuments(COLLECTION_NAME, new BsonDocument[] {document}); insertBsonDocuments(OTHER_COLLECTION_NAME, new BsonDocument[] {otherDocument}); queryMapper = getQueryMappingService(); } @Test void testMongoInjections() throws SQLException { final String primaryKeyColumnName = COLLECTION_NAME + "__id"; final String expectedKey = "$delete"; final String injection = String.format( "\"}, {$delete: {\"%1$s\", \"1\"}", primaryKeyColumnName); final String query = String.format( "SELECT \"%1$s\", \"%2$s\", \"%3$s\" FROM \"%4$s\".\"%5$s\"" + " WHERE \"%1$s\" = '%6$s'", primaryKeyColumnName, "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array", injection); final DocumentDbMqlQueryContext queryContext = queryMapper.get(query); Assertions.assertNotNull(queryContext); final List<Bson> aggregateOperations = queryContext.getAggregateOperations(); // Assert that the attempted injection did not add a '$delete' operation. 
assertKeyNotExists(expectedKey, aggregateOperations); // Assert that the attempted injection is interpreted as a '$literal' value assertValueExists(injection, aggregateOperations); final String query2 = String.format( "SELECT \"%1$s\", \"%2$s\", \"%3$s\" FROM (SELECT * FROM \"%4$s\".\"%5$s\"" + " WHERE \"%1$s\" = '%6$s')", primaryKeyColumnName, "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array", injection); final DocumentDbMqlQueryContext queryContext2 = queryMapper.get(query2); Assertions.assertNotNull(queryContext2); final List<Bson> aggregateOperations2 = queryContext2.getAggregateOperations(); assertKeyNotExists(expectedKey, aggregateOperations2); assertValueExists(injection, aggregateOperations2); final String query3 = String.format( "SELECT \"%1$s\", \"%2$s\", \"%3$s\" FROM \"%4$s\".\"%5$s\"" + " WHERE \"%1$s\" = SUBSTRING('%6$s', 1, 2000)", primaryKeyColumnName, "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array", injection); final DocumentDbMqlQueryContext queryContext3 = queryMapper.get(query3); Assertions.assertNotNull(queryContext3); final List<Bson> aggregateOperations3 = queryContext3.getAggregateOperations(); assertKeyNotExists(expectedKey, aggregateOperations3); assertValueExists(injection, aggregateOperations3); final String query4 = String.format( "SELECT \"%1$s\", \"%2$s\", \"%3$s\" FROM \"%4$s\".\"%5$s\"" + " WHERE \"%1$s\" = CONCAT('%6$s', '')", primaryKeyColumnName, "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array", injection); final DocumentDbMqlQueryContext queryContext4 = queryMapper.get(query4); Assertions.assertNotNull(queryContext4); final List<Bson> aggregateOperations4 = queryContext4.getAggregateOperations(); assertKeyNotExists(expectedKey, aggregateOperations4); assertValueExists(injection, aggregateOperations4); final String query5 = String.format( "SELECT \"%1$s\", \"%2$s\", \"%3$s\" FROM \"%4$s\".\"%5$s\"" + " WHERE \"%1$s\" = REVERSE('%6$s')", primaryKeyColumnName, "field", "field1", 
getDatabaseName(), COLLECTION_NAME + "_array", new StringBuilder(injection).reverse()); final DocumentDbMqlQueryContext queryContext5 = queryMapper.get(query5); Assertions.assertNotNull(queryContext5); final List<Bson> aggregateOperations5 = queryContext5.getAggregateOperations(); assertKeyNotExists(expectedKey, aggregateOperations5); assertValueExists(injection, aggregateOperations5); // Single-quotes final String injection6 = String.format( "'}, {$delete: {'%1$s', '1'}", primaryKeyColumnName); final String query6 = String.format( "SELECT \"%1$s\", \"%2$s\", \"%3$s\" FROM \"%4$s\".\"%5$s\"" + " WHERE \"%1$s\" = %6$s", primaryKeyColumnName, "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array", quote(injection6, '\'', Collections.singletonMap("[']", "''"))); final DocumentDbMqlQueryContext queryContext6 = queryMapper.get(query6); Assertions.assertNotNull(queryContext6); final List<Bson> aggregateOperations6 = queryContext6.getAggregateOperations(); assertKeyNotExists(expectedKey, aggregateOperations6); assertValueExists(injection6, aggregateOperations6); } @Test void testSqlInjections() throws SQLException { final String primaryKeyColumnName = COLLECTION_NAME + "__id"; final String injection = String.format( "'; DELETE FROM \"%1$s\" WHERE \"%2$s\" <> '", COLLECTION_NAME, primaryKeyColumnName); final String query = String.format( "SELECT \"%1$s\", \"%2$s\", \"%3$s\" FROM \"%4$s\".\"%5$s\"" + " WHERE \"%1$s\" = '%6$s'", primaryKeyColumnName, "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array", injection); final Exception exception = Assertions.assertThrows(SQLException.class, () -> queryMapper.get(query)); Assertions.assertTrue(exception.getMessage().contains("Reason: 'parse failed: Encountered \";\" at line 1")); final String query2 = String.format( "SELECT \"%1$s\", \"%2$s\", \"%3$s\" FROM (SELECT * FROM \"%4$s\".\"%5$s\"" + " WHERE \"%1$s\" = '%6$s')", primaryKeyColumnName, "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array", 
injection); final Exception exception2 = Assertions.assertThrows(SQLException.class, () -> queryMapper.get(query2)); Assertions.assertTrue(exception2.getMessage().contains("Reason: 'parse failed: Encountered \";\" at line 1")); // Assume SQL application will correctly escape input strings, as below final String injection3 = "'--"; final String query3 = String.format( "SELECT \"%1$s\", \"%2$s\", \"%3$s\" FROM \"%4$s\".\"%5$s\"" + " WHERE \"%1$s\" > %6$s AND \"%1$s\" < 'detect value'", primaryKeyColumnName, "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array", quote(injection3, '\'', Collections.singletonMap("[']", "''"))); final DocumentDbMqlQueryContext queryContext3 = queryMapper.get(query3); Assertions.assertNotNull(queryContext3); final List<Bson> aggregateOperations3 = queryContext3.getAggregateOperations(); assertValueExists("detect value", aggregateOperations3); assertValueExists(injection3, aggregateOperations3); } private static void assertKeyNotExists(final @NonNull String expectedKey, final List<Bson> aggregateOperations) { for (final Bson op : aggregateOperations) { final BsonDocument doc = op.toBsonDocument(); assertKeyNotExists(expectedKey, doc); } } private static void assertValueExists(final @NonNull String injection, final List<Bson> aggregateOperations) { boolean valueExists = false; for (final Bson op : aggregateOperations) { final BsonDocument doc = op.toBsonDocument(); valueExists = isValueExists(injection, doc); if (valueExists) { break; } } Assertions.assertTrue(valueExists); } private static boolean isValueExists(final @NonNull String injection, final BsonDocument doc) { boolean valueExists = false; for (final Map.Entry<String, BsonValue> entry : doc.entrySet()) { final BsonValue bsonValue = entry.getValue(); if (bsonValue.isDocument()) { valueExists = isValueExists(injection, bsonValue.asDocument()); if (valueExists) { break; } } else if (bsonValue.isArray()) { valueExists = isValueExists(injection, bsonValue.asArray()); if 
(valueExists) { break; } } else if (bsonValue.isString()) { final String actualValue = bsonValue.asString().getValue(); if (injection.equals(actualValue)) { valueExists = true; break; } } } return valueExists; } private static boolean isValueExists(final @NonNull String injection, final BsonArray array) { boolean valueExists = false; for (final BsonValue arrayValue : array) { if (arrayValue.isDocument()) { valueExists = isValueExists(injection, arrayValue.asDocument()); if (valueExists) { break; } } else if (arrayValue.isArray()) { valueExists = isValueExists(injection, arrayValue.asArray()); if (valueExists) { break; } } else if (arrayValue.isString()) { final String actualValue = array.asString().getValue(); if (injection.equals(actualValue)) { valueExists = true; break; } } } return valueExists; } private static void assertKeyNotExists(final @NonNull String expectedKey, final BsonDocument doc) { for (final Map.Entry<String, BsonValue> entry : doc.entrySet()) { final String actualKey = entry.getKey(); Assertions.assertNotEquals(expectedKey, actualKey); if (entry.getValue().isDocument()) { assertKeyNotExists(expectedKey, entry.getValue().asDocument()); } else if (entry.getValue().isArray()) { assertKeyNotExists(expectedKey, entry.getValue().asArray()); } } } private static void assertKeyNotExists(final @NonNull String expectedKey, final BsonArray array) { for (final BsonValue value : array) { if (value.isDocument()) { assertKeyNotExists(expectedKey, value.asDocument()); } else if (value.isArray()) { assertKeyNotExists(expectedKey, value.asArray()); } } } }
4,541
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/query/limitations/DocumentDbSqlLimitationsTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.query.limitations; import org.bson.BsonDocument; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension; import software.amazon.documentdb.jdbc.query.DocumentDbQueryMappingService; import software.amazon.documentdb.jdbc.query.DocumentDbQueryMappingServiceTest; import java.sql.SQLException; @ExtendWith(DocumentDbFlapDoodleExtension.class) public class DocumentDbSqlLimitationsTest extends DocumentDbQueryMappingServiceTest { private static final String COLLECTION_NAME = "testCollection"; private static final String OTHER_COLLECTION_NAME = "otherTestCollection"; private static DocumentDbQueryMappingService queryMapper; @BeforeAll void initialize() throws SQLException { final BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ " + "{ \"field\" : 1, \"field1\": \"value\" }, " + "{ \"field\" : 2, \"field2\" : \"value\" , \"field3\" : { \"field4\": 3} } ]}"); final BsonDocument otherDocument = BsonDocument.parse( "{ \"_id\" : \"key1\", \"otherArray\" : [ { \"field\" : 1, \"field3\": \"value\" }, { \"field\" : 2, \"field3\" : \"value\" } ]}"); insertBsonDocuments(COLLECTION_NAME, new BsonDocument[] 
{document}); insertBsonDocuments(OTHER_COLLECTION_NAME, new BsonDocument[] {otherDocument}); queryMapper = getQueryMappingService(); } @Test @DisplayName("Tests that GROUP BY with ROLLUP() fails as this is not supported.") void testRollup() { // DocumentDBAggregate throws exception when group type is CUBE or ROLLUP because we do not // have any logic to handle grouping by multiple group sets. Not sure if we can get the right // behaviour with only $group. // $facet may be useful here but it is not yet supported in DocumentDB. final String query = String.format( "SELECT \"%1$s\", \"%2$s\", \"%3$s\" FROM \"%4$s\".\"%5$s\" GROUP BY ROLLUP( \"%1$s\", \"%2$s\", \"%3$s\")", COLLECTION_NAME + "__id", "field", "field1", getDatabaseName(), COLLECTION_NAME + "_array"); Assertions.assertThrows( SQLException.class, () -> queryMapper.get(query), "Query requiring ROLLUP() should throw an exception."); } @Test @DisplayName("Tests that RANK() function should fail as it is not supported.") void testRank() { // Need to implement in RexToMongoTranslator. // $setWindowFields and $rank were added in 5.0 to support this. May be difficult to implement // with some combination of $group/$merge/$facet. // Translation: // DocumentDbProject(EXPR$0=[RANK() OVER (PARTITION BY $3 ORDER BY $2)]): // DocumentDbTableScan(table=[[database, testCollection_array]]): final String query = String.format( "SELECT RANK() OVER (PARTITION BY \"field1\" ORDER BY \"field\" ASC) FROM \"%s\".\"%s\"", getDatabaseName(), COLLECTION_NAME + "_array"); Assertions.assertThrows( SQLException.class, () -> queryMapper.get(query), "Query requiring RANK() should throw an exception."); } @Test @DisplayName("Tests that ROUND() function should fail as it is not supported.") void testRound() { // Need to implement in RexToMongoTranslator. // $round was only added in 4.2. May be able to emulate combining some other arithmetic // operators. 
// NOTE(review): this statement block is the tail of the test method declared before this
// chunk (a ROUND() push-down test, judging by the query text); reproduced unchanged.
final String query =
        String.format(
                "SELECT ROUND(\"field\") FROM \"%s\".\"%s\"",
                getDatabaseName(), COLLECTION_NAME + "_array");
Assertions.assertThrows(
        SQLException.class,
        () -> queryMapper.get(query),
        "Query requiring ROUND() should throw an exception");
    }

    /**
     * Verifies that WHERE-clause subqueries (IN, EXISTS, scalar comparison) are rejected with a
     * {@link SQLException}. Calcite plans these as semi-/anti-joins, which the DocumentDbJoin
     * implementation cannot translate to an aggregation pipeline.
     */
    @Test
    @DisplayName(
            "Tests that subqueries in WHERE clause using IN or EXISTS should fail as these are not supported.")
    void testWhereWithSubqueries() {
        // WHERE NOT EXISTS, IN, NOT IN are treated as semi-join or anti-join. They go through the
        // DocumentDbJoin implementation but fail join condition validations.
        // $lookup could be used to support these cases.
        // Translation:
        // DocumentDbToEnumerableConverter: ...
        // DocumentDbJoin(condition=[=($3, $9)], joinType=[semi]): ...
        // DocumentDbTableScan(table=[[database, testCollection_array]]): ...
        // DocumentDbTableScan(table=[[database, testCollection_array]]): ...
        final String subqueryWithIn =
                String.format(
                        "SELECT * FROM \"%1$s\".\"%2$s\" WHERE \"field1\" "
                                + "IN (SELECT \"field2\" FROM \"%1$s\".\"%2$s\")",
                        getDatabaseName(), COLLECTION_NAME + "_array");
        Assertions.assertThrows(
                SQLException.class,
                () -> queryMapper.get(subqueryWithIn),
                "Query with IN and a subquery should throw an exception.");

        // Translation:
        // DocumentDbToEnumerableConverter: ...
        // DocumentDbJoin(condition=[=($2, $7)], joinType=[semi]): ...
        // DocumentDbTableScan(table=[[database, testCollection_array]]): ...
        // DocumentDbFilter(condition=[IS NOT NULL($2)]): ...
        // DocumentDbTableScan(table=[[database, otherTestCollection_otherArray]]): ...
        final String subqueryWithExists =
                String.format(
                        "SELECT * FROM \"%1$s\".\"%2$s\" WHERE EXISTS "
                                + "(SELECT * FROM \"%1$s\".\"%3$s\" WHERE \"%2$s\".\"field\" = \"%3$s\".field)",
                        getDatabaseName(),
                        COLLECTION_NAME + "_array",
                        OTHER_COLLECTION_NAME + "_otherArray");
        Assertions.assertThrows(
                SQLException.class,
                () -> queryMapper.get(subqueryWithExists),
                "Query with EXISTS and a subquery should throw an exception.");

        // Translation:
        // DocumentDbToEnumerableConverter: ...
        // DocumentDbProject(testCollection__id=[$0], array_index_lvl_0=[$1], field=[$2], field1=[$3],
        // field2=[$4]): ...
        // DocumentDbJoin(condition=[<($2, $5)], joinType=[inner]): ...
        // DocumentDbTableScan(table=[[database, testCollection_array]]): ...
        // DocumentDbAggregate(group=[{}], EXPR$0=[MAX($2)]): ...
        // DocumentDbTableScan(table=[[database, testCollection_array]]): ...
        final String subqueryWithSingleValue =
                String.format(
                        "SELECT * FROM \"%1$s\".\"%2$s\" WHERE \"field\" "
                                + "< (SELECT MAX(\"field\") FROM \"%1$s\".\"%2$s\")",
                        getDatabaseName(), COLLECTION_NAME + "_array");
        Assertions.assertThrows(
                SQLException.class,
                () -> queryMapper.get(subqueryWithSingleValue),
                "Query with comparison operator and a subquery should throw an exception.");
    }

    /**
     * Verifies that the set operators UNION, INTERSECT and EXCEPT are rejected; there is no
     * converter rule for the corresponding Logical* relational operators in this adapter.
     */
    @Test
    @DisplayName(
            "Tests that set operations UNION, INTERSECT and EXCEPT should fail as these are not supported.")
    void testSetOperations() {
        // No rule to transform the LogicalUnion.
        // Same collection only - may be able to combine $facet(unsupported) + $setUnion
        // Generic - $unionWith (4.4)
        final String unionQuery =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\" UNION SELECT \"%s\" FROM \"%s\".\"%s\"",
                        getDatabaseName(),
                        COLLECTION_NAME,
                        COLLECTION_NAME + "__id",
                        getDatabaseName(),
                        COLLECTION_NAME + "_array");
        Assertions.assertThrows(
                SQLException.class,
                () -> queryMapper.get(unionQuery),
                "Query requiring UNION should throw an exception.");

        // No rule to transform the LogicalIntersect.
        // Same collection only - may be able to combine $facet(unsupported) + $setIntersection
        final String intersectQuery =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\" INTERSECT SELECT \"%s\" FROM \"%s\".\"%s\"",
                        getDatabaseName(),
                        COLLECTION_NAME,
                        COLLECTION_NAME + "__id",
                        getDatabaseName(),
                        COLLECTION_NAME + "_array");
        Assertions.assertThrows(
                SQLException.class,
                () -> queryMapper.get(intersectQuery),
                "Query requiring INTERSECT should throw an exception.");

        // No rule to transform the LogicalMinus.
        // Same collection only - may be able to combine $facet(unsupported) + $setDifference
        final String exceptQuery =
                String.format(
                        "SELECT * FROM \"%s\".\"%s\" EXCEPT SELECT \"%s\" FROM \"%s\".\"%s\"",
                        getDatabaseName(),
                        COLLECTION_NAME,
                        COLLECTION_NAME + "__id",
                        getDatabaseName(),
                        COLLECTION_NAME + "_array");
        Assertions.assertThrows(
                SQLException.class,
                () -> queryMapper.get(exceptQuery),
                "Query requiring EXCEPT or MINUS should throw an exception.");
    }

    /**
     * Verifies that scalar subqueries in the SELECT clause are rejected. Calcite rewrites them
     * as left outer joins against subquery aggregates; those that cannot be proven single-valued
     * are wrapped in SINGLE_VALUE, which this adapter does not implement.
     */
    @Test
    @DisplayName(
            "Tests that subqueries in the SELECT clause using IN or EXISTS should fail "
                    + "as these are not supported.")
    void testSelectWithSubqueries() {
        // The various subquery aggregates are determined first and then added as a left outer join to
        // the table.
        // This would also be supported by $lookup. Other uses of subqueries in the SELECT clause should
        // be similar.
        // Translation:
        // DocumentDbProject(EXPR$0=[CASE(=($1, $0), $2, $3)]):
        // DocumentDbJoin(condition=[true], joinType=[left]):
        // DocumentDbJoin(condition=[true], joinType=[left]):
        // DocumentDbJoin(condition=[true], joinType=[left]):
        // DocumentDbProject(field=[$2]):
        // DocumentDbTableScan(table=[[database, testCollection_array]]):
        // DocumentDbAggregate(group=[{}], EXPR$0=[MAX($2)]):
        // DocumentDbTableScan(table=[[database, testCollection_array]]):
        // DocumentDbAggregate(group=[{}], EXPR$0=[AVG($2)]):
        // DocumentDbTableScan(table=[[database, testCollection_array]]):
        // DocumentDbAggregate(group=[{}], EXPR$0=[MIN($2)]):
        // DocumentDbTableScan(table=[[database, testCollection_array]]):
        final String singleValueSubquery =
                String.format(
                        "SELECT CASE WHEN (SELECT MAX(\"field\") FROM \"%1$s\".\"%2$s\") = \"field\""
                                + "THEN (SELECT AVG(\"field\") FROM \"%1$s\".\"%2$s\") "
                                + "ELSE (SELECT MIN(\"field\") FROM \"%1$s\".\"%2$s\") END "
                                + "FROM \"%1$s\".\"%2$s\"",
                        getDatabaseName(), COLLECTION_NAME + "_array");
        Assertions.assertThrows(
                SQLException.class,
                () -> queryMapper.get(singleValueSubquery),
                "Query requiring scalar subquery should throw an exception.");

        // A scalar subquery used like this should only return a single value. Calcite wraps potentially
        // non-single value subqueries in a SINGLE_VALUE aggregate function.
        // The SINGLE_VALUE function should return the value if there is only one. Otherwise, it should
        // error out at runtime.
        // This behaviour may be hard to push-down. Additionally, it has same challenges as above.
        // Translation:
        // DocumentDbToEnumerableConverter:
        // DocumentDbProject(EXPR$0=[CASE(=($0, $1), 'yes':VARCHAR(3), 'no':VARCHAR(3))]):
        // DocumentDbJoin(condition=[true], joinType=[left]):
        // DocumentDbProject(field1=[$3]):
        // DocumentDbTableScan(table=[[database, testCollection_array]]):
        // DocumentDbAggregate(group=[{}], agg#0=[SINGLE_VALUE($4)]):
        // DocumentDbTableScan(table=[[database, testCollection_array]]):
        final String multipleValueSubQuery =
                String.format(
                        "SELECT CASE WHEN \"field1\" = (SELECT \"field2\" FROM \"%1$s\".\"%2$s\")"
                                + "THEN 'yes' ELSE 'no' END "
                                + "FROM \"%1$s\".\"%2$s\"",
                        getDatabaseName(), COLLECTION_NAME + "_array");
        // SINGLE_VALUE has no implementation in this adapter, so Calcite fails with an
        // AssertionError rather than a SQLException.
        Assertions.assertEquals(
                "unknown aggregate SINGLE_VALUE",
                Assertions.assertThrows(AssertionError.class, () -> queryMapper.get(multipleValueSubQuery))
                        .getMessage(),
                "Query requiring SINGLE_VALUE function should throw an exception.");
    }

    /**
     * Verifies that the statistical aggregate functions are rejected. Each currently surfaces as
     * a Calcite AssertionError ("unknown aggregate ...") rather than a SQLException.
     */
    @Test
    @DisplayName(
            "Tests that STDDEV(), STDEDEV_POP(), STD_DEV_SAMP(), VAR_POP and VAR_SAMP() should fail "
                    + "as these are not supported aggregate functions.")
    void testUnsupportedAggregateFunctions() {
        // $stdDevPop and $stdDevSamp are in 3.6 onwards but are not supported in DocumentDB.
        // Variance can be derived from $stdDevPop and $stdDevSamp (variance = stdDev ^2).
        // $covariancePop and $covarianceSamp were added in 5.0.
        final String stddev =
                String.format(
                        "SELECT STDDEV(\"field\") FROM \"%s\".\"%s\"",
                        getDatabaseName(), COLLECTION_NAME + "_array");
        Assertions.assertEquals(
                "unknown aggregate STDDEV",
                Assertions.assertThrows(AssertionError.class, () -> queryMapper.get(stddev)).getMessage(),
                "Query requiring STDDEV should throw an exception.");
        final String stddevPop =
                String.format(
                        "SELECT STDDEV_POP(\"field\") FROM \"%s\".\"%s\"",
                        getDatabaseName(), COLLECTION_NAME + "_array");
        Assertions.assertEquals(
                "unknown aggregate STDDEV_POP",
                Assertions.assertThrows(AssertionError.class, () -> queryMapper.get(stddevPop))
                        .getMessage(),
                "Query requiring STDDEV_POP should throw an exception.");
        final String stddevSamp =
                String.format(
                        "SELECT STDDEV_SAMP(\"field\") FROM \"%s\".\"%s\"",
                        getDatabaseName(), COLLECTION_NAME + "_array");
        Assertions.assertEquals(
                "unknown aggregate STDDEV_SAMP",
                Assertions.assertThrows(AssertionError.class, () -> queryMapper.get(stddevSamp))
                        .getMessage(),
                "Query requiring STDDEV_SAMP should throw an exception.");
        final String varPop =
                String.format(
                        "SELECT VAR_POP(\"field\") FROM \"%s\".\"%s\"",
                        getDatabaseName(), COLLECTION_NAME + "_array");
        Assertions.assertEquals(
                "unknown aggregate VAR_POP",
                Assertions.assertThrows(AssertionError.class, () -> queryMapper.get(varPop)).getMessage(),
                "Query requiring VAR_POP should throw an exception.");
        final String varSamp =
                String.format(
                        "SELECT VAR_SAMP(\"field\") FROM \"%s\".\"%s\"",
                        getDatabaseName(), COLLECTION_NAME + "_array");
        Assertions.assertEquals(
                "unknown aggregate VAR_SAMP",
                Assertions.assertThrows(AssertionError.class, () -> queryMapper.get(varSamp)).getMessage(),
                "Query requiring VAR_SAMP should throw an exception.");
    }
}
4,542
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/persist/DocumentDbSchemaWriterTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.persist; import com.mongodb.MongoException; import org.bson.BsonBinary; import org.bson.BsonBoolean; import org.bson.BsonDateTime; import org.bson.BsonDecimal128; import org.bson.BsonDocument; import org.bson.BsonDouble; import org.bson.BsonInt32; import org.bson.BsonInt64; import org.bson.BsonMaxKey; import org.bson.BsonMinKey; import org.bson.BsonNull; import org.bson.BsonObjectId; import org.bson.BsonString; import org.bson.BsonTimestamp; import org.bson.types.Decimal128; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties; import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironment; import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironmentFactory; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchema; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable; import software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGenerator; import java.sql.SQLException; import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.List; 
import java.util.Map; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.getPropertiesFromConnectionString; import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.SCHEMA_TABLE_ID_SEPARATOR; class DocumentDbSchemaWriterTest { private static final String DATABASE_NAME = "testDb"; private DocumentDbTestEnvironment testEnvironment; private static Stream<DocumentDbTestEnvironment> getTestEnvironments() { return DocumentDbTestEnvironmentFactory.getConfiguredEnvironments().stream(); } @BeforeAll static void beforeAll() throws Exception { for (DocumentDbTestEnvironment testEnvironment : getTestEnvironments().collect(Collectors.toList())) { // Start the test environment. testEnvironment.start(); } } @AfterEach void afterEach() throws SQLException { final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getJdbcConnectionString()); final DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, null); schemaWriter.remove(DocumentDbSchema.DEFAULT_SCHEMA_NAME); } @AfterAll static void afterAll() throws Exception { for (DocumentDbTestEnvironment testEnvironment : getTestEnvironments().collect(Collectors.toList())) { testEnvironment.stop(); } } @DisplayName("Tests writing the complete schema.") @ParameterizedTest(name = "testWriterWholeSchema - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testWriterWholeSchema(final DocumentDbTestEnvironment testEnvironment) throws SQLException, DocumentDbSchemaSecurityException { final DocumentDbConnectionProperties properties = getConnectionProperties(testEnvironment); final String collectionName = "testWriterWholeSchema"; final Map<String, DocumentDbSchemaTable> metadata = getSchemaTableMap(collectionName); final DocumentDbSchema schema = new DocumentDbSchema(DATABASE_NAME, 1, metadata); 
final DocumentDbSchemaWriter writer = new DocumentDbSchemaWriter(properties, null); writer.write(schema, metadata.values()); } @DisplayName("Tests updating table schema.") @ParameterizedTest(name = "testWriteTableSchema - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testWriteTableSchema(final DocumentDbTestEnvironment testEnvironment) throws Exception { final DocumentDbConnectionProperties properties = getConnectionProperties(testEnvironment); final String collectionName = "testWriteTableSchema"; final Map<String, DocumentDbSchemaTable> metadata = getSchemaTableMap(collectionName); final DocumentDbSchema schema = new DocumentDbSchema(DATABASE_NAME, 1, metadata); final String newUuid = UUID.randomUUID().toString(); final String newSqlName = UUID.randomUUID().toString(); try (DocumentDbSchemaWriter writer = new DocumentDbSchemaWriter(properties, null)) { // Write initial schema writer.write(schema, schema.getTableMap().values()); // Update the schema to create a new one. final DocumentDbSchemaTable schemaTable = schema.getTableMap().get(collectionName); schemaTable.setUuid(newUuid); schemaTable.setSqlName(newSqlName); writer.update(schema, Collections.singletonList(schemaTable)); } // Ensure both versions exist. 
try (DocumentDbSchemaReader reader = new DocumentDbSchemaReader(properties, null)) { final DocumentDbSchema schema1 = reader.read(schema.getSchemaName(), 1); Assertions.assertNotNull(schema1); Assertions.assertEquals(schema, schema1); Assertions.assertEquals(schema.getTableReferences().size(), schema1.getTableReferences().size()); Assertions.assertArrayEquals( schema.getTableReferences().toArray(new String[0]), schema1.getTableReferences().toArray(new String[0])); final DocumentDbSchema schema2 = reader.read(schema.getSchemaName(), 2); Assertions.assertNotNull(schema2); Assertions.assertEquals(1, schema2.getTableReferences().size()); Assertions.assertEquals( newSqlName + SCHEMA_TABLE_ID_SEPARATOR + newUuid, schema2.getTableReferences().toArray(new String[0])[0]); } } @DisplayName("Tests failing to write schema for restricted user.") @ParameterizedTest(name = "testWriteSchemaRestrictedUser - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testWriteSchemaRestrictedUser(final DocumentDbTestEnvironment testEnvironment) throws SQLException { final DocumentDbConnectionProperties properties = getConnectionProperties(testEnvironment, true); final String collectionName = "testWriteTableSchema"; final Map<String, DocumentDbSchemaTable> metadata = getSchemaTableMap(collectionName); final DocumentDbSchema schema = new DocumentDbSchema(DATABASE_NAME, 1, metadata); final DocumentDbSchemaWriter writer = new DocumentDbSchemaWriter(properties, null); final DocumentDbSchemaSecurityException exception = Assertions .assertThrows(DocumentDbSchemaSecurityException.class, () -> writer.write(schema, schema.getTableMap().values())); Assertions.assertTrue(exception.getCause() instanceof MongoException); final MongoException mongoException = (MongoException) exception.getCause(); Assertions.assertEquals(13, mongoException.getCode()); Assertions.assertTrue(mongoException.getMessage().startsWith("Command failed with error 13")); } private DocumentDbConnectionProperties 
getConnectionProperties( final DocumentDbTestEnvironment testEnvironment) throws SQLException { return getConnectionProperties(testEnvironment, false); } private DocumentDbConnectionProperties getConnectionProperties( final DocumentDbTestEnvironment testEnvironment, final boolean isRestrictedUser) throws SQLException { this.testEnvironment = testEnvironment; return getPropertiesFromConnectionString( isRestrictedUser ? testEnvironment.getRestrictedUserConnectionString() : testEnvironment.getJdbcConnectionString()); } private Map<String, DocumentDbSchemaTable> getSchemaTableMap( final String collectionName) { final List<BsonDocument> documentList = new ArrayList<>(); for (int count = 0; count < 3; count++) { final Instant dateTime = Instant.parse("2020-01-01T00:00:00.00Z"); final BsonDocument document = new BsonDocument() .append("_id", new BsonObjectId()) .append("fieldDecimal128", new BsonDecimal128(Decimal128.parse(String.valueOf(Double.MAX_VALUE)))) .append("fieldDouble", new BsonDouble(Double.MAX_VALUE)) .append("fieldString", new BsonString("新年快乐")) .append("fieldObjectId", new BsonObjectId()) .append("fieldBoolean", new BsonBoolean(true)) .append("fieldDate", new BsonDateTime(dateTime.toEpochMilli())) .append("fieldInt", new BsonInt32(Integer.MAX_VALUE)) .append("fieldLong", new BsonInt64(Long.MAX_VALUE)) .append("fieldMaxKey", new BsonMaxKey()) .append("fieldMinKey", new BsonMinKey()) .append("fieldNull", new BsonNull()) .append("fieldBinary", new BsonBinary(new byte[]{0, 1, 2})) .append("fieldTimestamp", new BsonTimestamp((int) TimeUnit.MILLISECONDS.toSeconds(dateTime.toEpochMilli()), 0)); Assertions.assertTrue(documentList.add(document)); } // Discover the collection metadata. return DocumentDbTableSchemaGenerator.generate( collectionName, documentList.iterator()); } }
4,543
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/persist/DocumentDbSchemaReaderTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.persist; import com.mongodb.MongoSecurityException; import org.bson.BsonBinary; import org.bson.BsonBoolean; import org.bson.BsonDateTime; import org.bson.BsonDecimal128; import org.bson.BsonDocument; import org.bson.BsonDouble; import org.bson.BsonInt32; import org.bson.BsonInt64; import org.bson.BsonMaxKey; import org.bson.BsonMinKey; import org.bson.BsonNull; import org.bson.BsonObjectId; import org.bson.BsonString; import org.bson.BsonTimestamp; import org.bson.types.Decimal128; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties; import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironment; import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironmentFactory; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchema; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable; import software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGenerator; import java.sql.SQLException; import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; 
import java.util.stream.Collectors; import java.util.stream.Stream; import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.getPropertiesFromConnectionString; import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.DEFAULT_SCHEMA_NAME; class DocumentDbSchemaReaderTest { private static final String DATABASE_NAME = "testDb"; private static final String COLLECTION_NAME = DocumentDbSchemaReaderTest.class.getSimpleName(); private static final Map<String, DocumentDbSchemaTable> METADATA; private static final DocumentDbSchema SCHEMA; private static final String TABLE_ID; private static Stream<DocumentDbTestEnvironment> getTestEnvironments() { return DocumentDbTestEnvironmentFactory.getConfiguredEnvironments().stream(); } static { final List<BsonDocument> documentList = new ArrayList<>(); for (int count = 0; count < 3; count++) { final Instant dateTime = Instant.parse("2020-01-01T00:00:00.00Z"); final BsonDocument document = new BsonDocument() .append("_id", new BsonObjectId()) .append("fieldDecimal128", new BsonDecimal128(Decimal128.parse(String.valueOf(Double.MAX_VALUE)))) .append("fieldDouble", new BsonDouble(Double.MAX_VALUE)) .append("fieldString", new BsonString("新年快乐")) .append("fieldObjectId", new BsonObjectId()) .append("fieldBoolean", new BsonBoolean(true)) .append("fieldDate", new BsonDateTime(dateTime.toEpochMilli())) .append("fieldInt", new BsonInt32(Integer.MAX_VALUE)) .append("fieldLong", new BsonInt64(Long.MAX_VALUE)) .append("fieldMaxKey", new BsonMaxKey()) .append("fieldMinKey", new BsonMinKey()) .append("fieldNull", new BsonNull()) .append("fieldBinary", new BsonBinary(new byte[]{0, 1, 2})) .append("fieldTimestamp", new BsonTimestamp((int) TimeUnit.MILLISECONDS.toSeconds(dateTime.toEpochMilli()), 1)); Assertions.assertTrue(documentList.add(document)); } // Discover the collection metadata. 
METADATA = DocumentDbTableSchemaGenerator.generate(COLLECTION_NAME, documentList.iterator()); SCHEMA = new DocumentDbSchema(DATABASE_NAME, 1, METADATA); final DocumentDbSchemaTable schemaTable = METADATA.get(COLLECTION_NAME); Assertions.assertNotNull(schemaTable); TABLE_ID = schemaTable.getId(); } @BeforeAll static void beforeAll() throws Exception { for (DocumentDbTestEnvironment testEnvironment : getTestEnvironments() .collect(Collectors.toList())) { // Start the test environment. testEnvironment.start(); final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getJdbcConnectionString()); final DocumentDbSchemaWriter writer = new DocumentDbSchemaWriter(properties, null); writer.write(SCHEMA, METADATA.values()); } } @AfterAll static void afterAll() throws Exception { for (DocumentDbTestEnvironment testEnvironment : getTestEnvironments() .collect(Collectors.toList())) { final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getJdbcConnectionString()); final DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, null); schemaWriter.remove(DocumentDbSchema.DEFAULT_SCHEMA_NAME); testEnvironment.stop(); } } @DisplayName("Test reading default schema with no options.") @ParameterizedTest(name = "testRead - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testRead(final DocumentDbTestEnvironment testEnvironment) throws SQLException { Assertions.assertNotNull(testEnvironment); final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getJdbcConnectionString()); final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, null); final DocumentDbSchema schema = schemaReader.read(); Assertions.assertNotNull(schema); Assertions.assertEquals(DEFAULT_SCHEMA_NAME, schema.getSchemaName()); Assertions.assertEquals(DATABASE_NAME, schema.getSqlName()); Assertions.assertEquals(1, 
schema.getSchemaVersion()); Assertions.assertNotNull(schema.getTableReferences()); Assertions.assertEquals(1, schema.getTableReferences().size()); } @DisplayName("Test reading a schema by name.") @ParameterizedTest(name = "testReadWithSchema - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testReadWithSchema(final DocumentDbTestEnvironment testEnvironment) throws SQLException { Assertions.assertNotNull(testEnvironment); final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getJdbcConnectionString()); final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, null); final DocumentDbSchema schema = schemaReader.read(DEFAULT_SCHEMA_NAME); Assertions.assertNotNull(schema); Assertions.assertEquals(DEFAULT_SCHEMA_NAME, schema.getSchemaName()); Assertions.assertEquals(DATABASE_NAME, schema.getSqlName()); Assertions.assertEquals(1, schema.getSchemaVersion()); Assertions.assertNotNull(schema.getTableReferences()); Assertions.assertEquals(1, schema.getTableReferences().size()); } @DisplayName("Test reading schema by name and version.") @ParameterizedTest(name = "testReadWithVersion - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testReadWithVersion(final DocumentDbTestEnvironment testEnvironment) throws SQLException { Assertions.assertNotNull(testEnvironment); final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getJdbcConnectionString()); final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, null); final DocumentDbSchema schema = schemaReader.read(DEFAULT_SCHEMA_NAME, 1); Assertions.assertNotNull(schema); Assertions.assertEquals(DEFAULT_SCHEMA_NAME, schema.getSchemaName()); Assertions.assertEquals(DATABASE_NAME, schema.getSqlName()); Assertions.assertEquals(1, schema.getSchemaVersion()); Assertions.assertNotNull(schema.getTableReferences()); Assertions.assertEquals(1, 
schema.getTableReferences().size()); } @DisplayName("Test reading a specific table schema.") @ParameterizedTest(name = "testReadTable - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testReadTable(final DocumentDbTestEnvironment testEnvironment) throws SQLException { Assertions.assertNotNull(testEnvironment); final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getJdbcConnectionString()); final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, null); final DocumentDbSchemaTable schemaTable = schemaReader.readTable(DEFAULT_SCHEMA_NAME, 1, TABLE_ID); Assertions.assertNotNull(schemaTable); Assertions.assertEquals(TABLE_ID, schemaTable.getId()); Assertions.assertEquals(COLLECTION_NAME, schemaTable.getSqlName()); Assertions.assertEquals(COLLECTION_NAME, schemaTable.getCollectionName()); Assertions.assertNotNull(schemaTable.getColumnMap()); Assertions.assertEquals(14, schemaTable.getColumnMap().size()); } // Negative tests @DisplayName("Test reading schema with non-existent version.") @ParameterizedTest(name = "testReadWithNonExistentVersion - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testReadWithNonExistentVersion(final DocumentDbTestEnvironment testEnvironment) throws SQLException { Assertions.assertNotNull(testEnvironment); final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getJdbcConnectionString()); final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, null); final DocumentDbSchema schema = schemaReader.read(DEFAULT_SCHEMA_NAME, 2); Assertions.assertNull(schema); } @DisplayName("Test reading schema with non-existent schema name.") @ParameterizedTest(name = "testReadWithNonExistentSchema - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testReadWithNonExistentSchema(final DocumentDbTestEnvironment testEnvironment) throws SQLException { 
Assertions.assertNotNull(testEnvironment); final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getJdbcConnectionString()); final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, null); final DocumentDbSchema schema = schemaReader.read("unknown"); Assertions.assertNull(schema); } @DisplayName("Test reading schema with invalid connection properties.") @ParameterizedTest(name = "testReadWithInvalidConnectionProperties - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testReadWithInvalidConnectionProperties(final DocumentDbTestEnvironment testEnvironment) throws SQLException { Assertions.assertNotNull(testEnvironment); final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getJdbcConnectionString()); final DocumentDbConnectionProperties newProperties = new DocumentDbConnectionProperties(properties); newProperties.setUser("unknown"); final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(newProperties, null); Assertions.assertEquals("Exception authenticating " + "MongoCredential{mechanism=SCRAM-SHA-1, userName='unknown', " + "source='admin', password=<hidden>, mechanismProperties=<hidden>}", Assertions.assertThrows(MongoSecurityException.class, schemaReader::read) .getMessage()); } @DisplayName("Test reading schema using restricted user.") @ParameterizedTest(name = "testReadWithRestrictedUser - [{index}] - {arguments}") @MethodSource("getTestEnvironments") void testReadWithRestrictedUser(final DocumentDbTestEnvironment testEnvironment) throws SQLException { Assertions.assertNotNull(testEnvironment); final DocumentDbConnectionProperties properties = getPropertiesFromConnectionString( testEnvironment.getRestrictedUserConnectionString()); // This will allow read of the schema collection(s) final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, null); final DocumentDbSchema schema = 
schemaReader.read(); Assertions.assertNotNull(schema); Assertions.assertEquals(DEFAULT_SCHEMA_NAME, schema.getSchemaName()); Assertions.assertEquals(DATABASE_NAME, schema.getSqlName()); Assertions.assertEquals(1, schema.getSchemaVersion()); Assertions.assertNotNull(schema.getTableReferences()); Assertions.assertEquals(1, schema.getTableReferences().size()); final DocumentDbSchemaTable schemaTable = schemaReader.readTable( schema.getSchemaName(), schema.getSchemaVersion(), schema.getTableReferences().toArray(new String[]{})[0]); Assertions.assertNotNull(schemaTable); Assertions.assertEquals(TABLE_ID, schemaTable.getId()); Assertions.assertEquals(COLLECTION_NAME, schemaTable.getSqlName()); Assertions.assertEquals(COLLECTION_NAME, schemaTable.getCollectionName()); Assertions.assertNotNull(schemaTable.getColumnMap()); Assertions.assertEquals(14, schemaTable.getColumnMap().size()); } }
4,544
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/metadata/DocumentDbTableSchemaGeneratorTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.metadata;

import org.bson.BsonType;

import java.util.Map;

/**
 * Base/helper class for table-schema-generator tests: a predicate describing which BSON type
 * pairs result in a virtual table, a demo-mode model printer, and a caller-name lookup helper.
 */
class DocumentDbTableSchemaGeneratorTest {
    protected static final String COLLECTION_NAME =
            DocumentDbTableSchemaGeneratorTest.class.getSimpleName();
    // When true, printMetadataOutput dumps the generated model to stdout.
    private static final boolean DEMO_MODE = false;

    /**
     * Indicates whether the pair of consecutive BSON types produces a virtual table:
     * either both are the same complex type (ARRAY/ARRAY or DOCUMENT/DOCUMENT), or one
     * is NULL and the other is a complex type.
     */
    protected boolean producesVirtualTable(final BsonType bsonType, final BsonType nextBsonType) {
        if (bsonType == nextBsonType) {
            return isComplexType(bsonType);
        }
        if (bsonType == BsonType.NULL) {
            return isComplexType(nextBsonType);
        }
        if (nextBsonType == BsonType.NULL) {
            return isComplexType(bsonType);
        }
        return false;
    }

    // The BSON types that the schema generator models as virtual tables.
    private static boolean isComplexType(final BsonType type) {
        return type == BsonType.ARRAY || type == BsonType.DOCUMENT;
    }

    /** Prints the generated schema model to stdout — a no-op unless DEMO_MODE is enabled. */
    protected void printMetadataOutput(final Map<String, DocumentDbSchemaTable> model,
            final String testName) {
        if (!DEMO_MODE) {
            return;
        }
        final String nameOfTest = testName != null ? testName : "TEST";
        System.out.printf("Start of %s%n", nameOfTest);
        System.out.println(model.toString());
        System.out.printf("End of %s%n", nameOfTest);
    }

    /** Returns the name of the method that invoked this helper, or "" if unavailable. */
    protected static String getMethodName() {
        // Depth 2 is the direct caller: [0]=getStackTrace, [1]=getMethodName, [2]=caller.
        final int stackDepth = 2;
        final StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
        return stackDepth < stackTraceElements.length
                ? stackTraceElements[stackDepth].getMethodName()
                : "";
    }
}
4,545
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/metadata/DocumentDbMetadataColumnTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.metadata;

import org.bson.BsonType;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import software.amazon.documentdb.jdbc.common.utilities.JdbcType;

import java.util.Arrays;
import java.util.List;

/**
 * Unit tests for the {@code equals}/{@code hashCode} contract of
 * {@link DocumentDbMetadataColumn}.
 */
public class DocumentDbMetadataColumnTest {

    @DisplayName("Tests equals() method with different combinations.")
    @Test
    void testEquals() {
        // Two columns built from identical constructor arguments: must be equal
        // and, per the Object contract, must share the same hash code.
        final DocumentDbMetadataColumn column1 = new DocumentDbMetadataColumn(
                1, 1, 1, 1, "table", "table", "path", false, "path", "column",
                JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table");
        final DocumentDbMetadataColumn column2 = new DocumentDbMetadataColumn(
                1, 1, 1, 1, "table", "table", "path", false, "path", "column",
                JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table");

        // Each variant below differs from column1 in exactly ONE constructor
        // argument, so the loop verifies that every field participates in
        // equals(). The order mirrors the constructor parameter positions.
        final List<DocumentDbMetadataColumn> unequalVariants = Arrays.asList(
                new DocumentDbMetadataColumn( // 2nd argument differs
                        1, 2, 1, 1, "table", "table", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // 3rd argument differs
                        1, 1, 2, 1, "table", "table", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // 4th argument differs
                        1, 1, 1, 2, "table", "table", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // 5th argument differs
                        1, 1, 1, 1, "other", "table", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // 6th argument differs
                        1, 1, 1, 1, "table", "other", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // 7th argument differs
                        1, 1, 1, 1, "table", "table", "other", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // 8th argument differs
                        1, 1, 1, 1, "table", "table", "path", true, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // 9th argument differs
                        1, 1, 1, 1, "table", "table", "path", false, "other", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // 10th argument differs
                        1, 1, 1, 1, "table", "table", "path", false, "path", "other",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // JDBC type differs
                        1, 1, 1, 1, "table", "table", "path", false, "path", "column",
                        JdbcType.VARCHAR, BsonType.INT64, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // BSON type differs
                        1, 1, 1, 1, "table", "table", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.STRING, false, true, "table", "table"),
                new DocumentDbMetadataColumn( // 13th argument differs
                        1, 1, 1, 1, "table", "table", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, true, true, "table", "table"),
                new DocumentDbMetadataColumn( // 14th argument differs
                        1, 1, 1, 1, "table", "table", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, false, "table", "table"),
                new DocumentDbMetadataColumn( // 15th argument differs
                        1, 1, 1, 1, "table", "table", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "other", "table"),
                new DocumentDbMetadataColumn( // 16th argument differs
                        1, 1, 1, 1, "table", "table", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "other"),
                new DocumentDbMetadataColumn( // 1st argument differs
                        2, 1, 1, 1, "table", "table", "path", false, "path", "column",
                        JdbcType.BIGINT, BsonType.INT64, false, true, "table", "table"));

        // Reflexive and symmetric equality, plus the equals/hashCode contract
        // (equal objects must report equal hash codes).
        Assertions.assertEquals(column1, column1);
        Assertions.assertEquals(column1, column2);
        Assertions.assertEquals(column2, column1);
        Assertions.assertEquals(column1.hashCode(), column2.hashCode());

        // Any single-field difference must break equality.
        for (DocumentDbMetadataColumn variant : unequalVariants) {
            Assertions.assertNotEquals(column1, variant);
        }

        // Comparing against an unrelated type must return false, not throw.
        Assertions.assertNotEquals(column1, new Object());
    }
}
4,546
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/metadata/DocumentDbMetadataTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.metadata;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.BsonDocument;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironment;
import software.amazon.documentdb.jdbc.common.test.DocumentDbTestEnvironmentFactory;
import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter;

import java.sql.SQLException;
import java.util.UUID;
import java.util.function.Consumer;

import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_LATEST_OR_NEW;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_NEW;

/**
 * Tests retrieval, versioning and removal of {@link DocumentDbDatabaseSchemaMetadata}.
 *
 * <p>NOTE: each test now closes its {@link MongoClient} via try-with-resources;
 * the original code leaked one client connection per test.
 */
class DocumentDbMetadataTest {

    // Need to start and stop the test environment between tests to clear the collections.
    @BeforeEach
    void beforeEach() throws Exception {
        // Start the test environment.
        final DocumentDbTestEnvironment testEnvironment = DocumentDbTestEnvironmentFactory
                .getMongoDb40Environment();
        testEnvironment.start();
    }

    @AfterEach
    void afterEach() throws Exception {
        // Stop the test environment so collections created by the test are discarded.
        DocumentDbTestEnvironmentFactory.getMongoDb40Environment().stop();
    }

    @DisplayName("Test to get database metadata for initial or latest version.")
    @Test
    void testGetInitialWithRefresh() throws Exception {
        final DocumentDbTestEnvironment testEnvironment = DocumentDbTestEnvironmentFactory
                .getMongoDb40Environment();
        final String schemaName = UUID.randomUUID().toString();
        final DocumentDbConnectionProperties properties = DocumentDbConnectionProperties
                .getPropertiesFromConnectionString(testEnvironment.getJdbcConnectionString());
        // Close the client when done to avoid leaking connections between tests.
        try (MongoClient client = testEnvironment.createMongoClient()) {
            // Initial read of an empty database: version 1, no tables.
            final DocumentDbDatabaseSchemaMetadata databaseMetadata0 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, client);
            Assertions.assertEquals(1, databaseMetadata0.getSchemaVersion());
            Assertions.assertEquals(0, databaseMetadata0.getTableSchemaMap().size());

            // Prepare some data.
            final String collectionName = testEnvironment.newCollectionName(true);
            prepareTestData(
                    client,
                    testEnvironment.getDatabaseName(),
                    collectionName,
                    collection -> testEnvironment.prepareSimpleConsistentData(collection, 10));

            // Even though we've added data, we're not refreshing, so expecting 0.
            final DocumentDbDatabaseSchemaMetadata databaseMetadata00 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, VERSION_LATEST_OR_NEW, client);
            Assertions.assertEquals(0, databaseMetadata00.getTableSchemaMap().size());
            Assertions.assertEquals(1, databaseMetadata0.getSchemaVersion());
            Assertions.assertEquals(0, databaseMetadata0.getTableSchemaMap().size());

            // Now use the VERSION_NEW flag to re-read the collection(s).
            final DocumentDbDatabaseSchemaMetadata databaseMetadata1 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, VERSION_NEW, client);
            Assertions.assertEquals(1, databaseMetadata1.getTableSchemaMap().size());
            Assertions.assertEquals(2, databaseMetadata1.getSchemaVersion());
            final DocumentDbSchemaTable metadataTable = databaseMetadata1
                    .getTableSchemaMap().get(collectionName);
            Assertions.assertNotNull(metadataTable);
            Assertions.assertEquals(14, metadataTable.getColumnMap().size());

            // Without a refresh we'll get the same metadata.
            final DocumentDbDatabaseSchemaMetadata databaseMetadata2 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, client);
            // This is exactly the same as it is cached.
            Assertions.assertEquals(databaseMetadata1, databaseMetadata2);
            Assertions.assertEquals(2, databaseMetadata2.getSchemaVersion());

            // Clean up the persisted schema.
            try (DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, client)) {
                schemaWriter.remove(schemaName);
            }
        }
    }

    @DisplayName("Test to get database metadata for specific version.")
    @Test
    void testGetSpecific() throws Exception {
        final DocumentDbTestEnvironment testEnvironment = DocumentDbTestEnvironmentFactory
                .getMongoDb40Environment();
        final String schemaName = UUID.randomUUID().toString();
        final DocumentDbConnectionProperties properties = DocumentDbConnectionProperties
                .getPropertiesFromConnectionString(testEnvironment.getJdbcConnectionString());
        try (MongoClient client = testEnvironment.createMongoClient()) {
            final DocumentDbDatabaseSchemaMetadata databaseMetadata0 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, client);
            Assertions.assertEquals(0, databaseMetadata0.getTableSchemaMap().size());
            Assertions.assertEquals(1, databaseMetadata0.getSchemaVersion());

            // Prepare some data.
            final String collectionName = testEnvironment.newCollectionName(true);
            prepareTestData(
                    client,
                    testEnvironment.getDatabaseName(),
                    collectionName,
                    collection -> testEnvironment.prepareSimpleConsistentData(collection, 10));

            // Now use the VERSION_NEW flag to re-read the collection(s).
            final DocumentDbDatabaseSchemaMetadata databaseMetadata1 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, VERSION_NEW, client);
            Assertions.assertEquals(1, databaseMetadata1.getTableSchemaMap().size());
            Assertions.assertEquals(2, databaseMetadata1.getSchemaVersion());
            final DocumentDbSchemaTable metadataTable = databaseMetadata1
                    .getTableSchemaMap().get(collectionName);
            Assertions.assertNotNull(metadataTable);
            Assertions.assertEquals(14, metadataTable.getColumnMap().size());

            // Requesting the same version explicitly yields equal metadata.
            final DocumentDbDatabaseSchemaMetadata databaseMetadata2 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, databaseMetadata1.getSchemaVersion(), client);
            Assertions.assertEquals(databaseMetadata1, databaseMetadata2);

            // Check that specifying an unknown version results in no associated metadata.
            final DocumentDbDatabaseSchemaMetadata databaseMetadata3 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, databaseMetadata1.getSchemaVersion() + 1, client);
            Assertions.assertNull(databaseMetadata3);

            // Clean up the persisted schema.
            try (DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, client)) {
                schemaWriter.remove(schemaName);
            }
        }
    }

    @DisplayName("Tests removing all versions of schema")
    @Test
    void testRemoveSchema() throws SQLException {
        final DocumentDbTestEnvironment testEnvironment = DocumentDbTestEnvironmentFactory
                .getMongoDb40Environment();
        final String schemaName = UUID.randomUUID().toString();
        final DocumentDbConnectionProperties properties = DocumentDbConnectionProperties
                .getPropertiesFromConnectionString(testEnvironment.getJdbcConnectionString());
        try (MongoClient client = testEnvironment.createMongoClient()) {
            final DocumentDbDatabaseSchemaMetadata databaseMetadata0 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, client);
            Assertions.assertEquals(0, databaseMetadata0.getTableSchemaMap().size());
            Assertions.assertEquals(1, databaseMetadata0.getSchemaVersion());

            // Prepare some data.
            final String collectionName = testEnvironment.newCollectionName(true);
            prepareTestData(
                    client,
                    testEnvironment.getDatabaseName(),
                    collectionName,
                    collection -> testEnvironment.prepareSimpleConsistentData(collection, 10));

            // Now use the VERSION_NEW flag to re-read the collection(s).
            final DocumentDbDatabaseSchemaMetadata databaseMetadata1 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, VERSION_NEW, client);
            Assertions.assertEquals(1, databaseMetadata1.getTableSchemaMap().size());
            Assertions.assertEquals(2, databaseMetadata1.getSchemaVersion());

            // Removing without a version removes ALL versions of the schema.
            DocumentDbDatabaseSchemaMetadata.remove(properties, schemaName, client);

            final DocumentDbDatabaseSchemaMetadata databaseMetadata2 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, 2, client);
            Assertions.assertNull(databaseMetadata2);
            final DocumentDbDatabaseSchemaMetadata databaseMetadata3 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, 1, client);
            Assertions.assertNull(databaseMetadata3);
        }
    }

    @DisplayName("Tests removing a specific version of schema")
    @Test
    void testRemoveSpecificSchema() throws SQLException {
        final DocumentDbTestEnvironment testEnvironment = DocumentDbTestEnvironmentFactory
                .getMongoDb40Environment();
        final String schemaName = UUID.randomUUID().toString();
        final DocumentDbConnectionProperties properties = DocumentDbConnectionProperties
                .getPropertiesFromConnectionString(testEnvironment.getJdbcConnectionString());
        try (MongoClient client = testEnvironment.createMongoClient()) {
            final DocumentDbDatabaseSchemaMetadata databaseMetadata0 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, client);
            Assertions.assertEquals(0, databaseMetadata0.getTableSchemaMap().size());
            Assertions.assertEquals(1, databaseMetadata0.getSchemaVersion());

            // Prepare some data.
            final String collectionName = testEnvironment.newCollectionName(true);
            prepareTestData(
                    client,
                    testEnvironment.getDatabaseName(),
                    collectionName,
                    collection -> testEnvironment.prepareSimpleConsistentData(collection, 10));

            // Now use the VERSION_NEW flag to re-read the collection(s).
            final DocumentDbDatabaseSchemaMetadata databaseMetadata1 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, VERSION_NEW, client);
            Assertions.assertEquals(1, databaseMetadata1.getTableSchemaMap().size());
            Assertions.assertEquals(2, databaseMetadata1.getSchemaVersion());

            // Remove only version 2; version 2 must no longer be retrievable.
            DocumentDbDatabaseSchemaMetadata.remove(properties, schemaName, 2, client);

            final DocumentDbDatabaseSchemaMetadata databaseMetadata2 = DocumentDbDatabaseSchemaMetadata
                    .get(properties, schemaName, 2, client);
            Assertions.assertNull(databaseMetadata2);
        }
    }

    /**
     * Populates the named collection using the supplied data-preparer callback.
     *
     * @param client the client connected to the test environment.
     * @param databaseName the database containing the collection.
     * @param collectionName the collection to populate.
     * @param dataPreparer callback that inserts documents into the collection.
     */
    private static void prepareTestData(
            final MongoClient client,
            final String databaseName,
            final String collectionName,
            final Consumer<MongoCollection<BsonDocument>> dataPreparer) {
        final MongoDatabase database = client.getDatabase(databaseName);
        final MongoCollection<BsonDocument> collection =
                database.getCollection(collectionName, BsonDocument.class);
        dataPreparer.accept(collection);
    }
}
4,547
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/metadata/DocumentDbTableSchemaGeneratorIdentifierTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.metadata;

import org.bson.BsonArray;
import org.bson.BsonDocument;
import org.bson.BsonInt32;
import org.bson.BsonNull;
import org.bson.BsonValue;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static org.apache.calcite.sql.parser.SqlParser.DEFAULT_IDENTIFIER_MAX_LENGTH;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.toName;

/**
 * These tests check that table and column names are truncated to fit the max identifier length
 * for Calcite queries.
 */
public class DocumentDbTableSchemaGeneratorIdentifierTest extends DocumentDbTableSchemaGeneratorTest {

    @Test
    @DisplayName("Tests identifier names that are longer than allowed maximum")
    void testLongName() {
        final Map<String, String> tableNameMap = new HashMap<>();
        String testName;

        // Within the limit, the path maps dots to underscores unchanged.
        testName = toName("a.b.c", tableNameMap, 128);
        Assertions.assertEquals("a_b_c", testName);
        // Over the limit, interior segments are dropped to fit.
        testName = toName("a.b.c", tableNameMap, 4);
        Assertions.assertEquals("a_c", testName);
        // A previously-shortened path returns its cached name even with a larger limit.
        testName = toName("a.b.c", tableNameMap, 128);
        Assertions.assertEquals("a_c", testName);

        // First long path establishes the base shortened name.
        testName = toName("a.b.c.d.e.f.g", tableNameMap, 10);
        Assertions.assertEquals("a_d_e_f_g", testName);

        // Paths differing only in the (dropped) second segment all shorten to the
        // same base name and therefore get an incrementing numeric suffix. The
        // tenth collision drops the separator ("a_d_e_f10") to stay within the limit.
        final char[] secondSegments = {'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l'};
        for (int i = 0; i < secondSegments.length; i++) {
            final String collidingPath = "a." + secondSegments[i] + ".c.d.e.f.g";
            final String expected = i < 9 ? "a_d_e_f_" + (i + 1) : "a_d_e_f10";
            Assertions.assertEquals(expected, toName(collidingPath, tableNameMap, 10));
        }

        // A first segment longer than the limit is removed entirely.
        testName = toName("12345678901.x.y.d.e.f.g", tableNameMap, 10);
        Assertions.assertEquals("_d_e_f_g", testName);

        // A single over-long segment is truncated to the limit.
        testName = toName("baseTable01", tableNameMap, 10); // "12345678901";
        Assertions.assertEquals("baseTable0", testName);

        // Truncated child names that collide also receive numeric suffixes.
        testName = toName("baseTable01.childtble01", tableNameMap, 10);
        Assertions.assertEquals("hildtble01", testName);
        testName = toName("baseTable02.childtble01", tableNameMap, 10);
        Assertions.assertEquals("hildtble02", testName);
        testName = toName("baseTable02.childtble02", tableNameMap, 10);
        Assertions.assertEquals("hildtble03", testName);
    }

    @DisplayName("Tests that even deeply nested documents and array have name length less than max.")
    @Test
    void testDeeplyNestedDocumentsArraysForSqlNameLength() {
        // Build a 200-level deep document; each level contributes a field,
        // a nested document, and a nested array.
        BsonValue doc = new BsonNull();
        for (int i = 199; i >= 0; i--) {
            doc = new BsonDocument("_id", new BsonInt32(i))
                    .append(i + "field", new BsonInt32(i))
                    .append(i + "doc", doc)
                    .append(i + "array", new BsonArray(Collections.singletonList(new BsonInt32(i))));
        }
        final Map<String, DocumentDbSchemaTable> tableMap = DocumentDbTableSchemaGenerator
                .generate(COLLECTION_NAME, Collections.singleton((BsonDocument) doc).iterator());

        // One virtual table per nested document and per nested array: 200 + 200.
        Assertions.assertEquals(400, tableMap.size());
        // Every generated table and column name must fit Calcite's identifier limit.
        tableMap.keySet().stream()
                .map(tableName -> tableName.length() <= DEFAULT_IDENTIFIER_MAX_LENGTH)
                .forEach(Assertions::assertTrue);
        tableMap.values().stream()
                .flatMap(schemaTable -> schemaTable.getColumns().stream())
                .map(schemaColumn -> schemaColumn.getSqlName().length() <= DEFAULT_IDENTIFIER_MAX_LENGTH)
                .forEach(Assertions::assertTrue);
    }
}
4,548
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/metadata/DocumentDbMetadataScannerTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.metadata;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.BsonDocument;
import org.bson.BsonInt32;
import org.bson.BsonObjectId;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.DocumentDbMetadataScanMethod;
import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleExtension;
import software.amazon.documentdb.jdbc.common.test.DocumentDbFlapDoodleTest;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.NoSuchElementException;

/**
 * Tests {@link DocumentDbMetadataScanner#getIterator} against a local FlapDoodle-managed
 * MongoDB instance for each scan method (RANDOM, ID_FORWARD, ID_REVERSE, ALL).
 *
 * <p>NOTE(review): tests share the mutable {@code documents}/{@code ids} fields populated by
 * {@link #addSimpleDataToDatabase}; each test uses its own collection so they do not interfere.
 */
@ExtendWith(DocumentDbFlapDoodleExtension.class)
public class DocumentDbMetadataScannerTest extends DocumentDbFlapDoodleTest {
    private static final String USER = "user";
    private static final String PASS = "password";
    private static final String DATABASE = "testDb";
    private static final String HOST = "localhost";
    private static final String ADMIN = "admin";
    // Connection properties rebuilt before each test by closeAndResetProperties().
    private DocumentDbConnectionProperties properties;
    // Documents inserted by addSimpleDataToDatabase() for the current test.
    private ArrayList<BsonDocument> documents;
    // String form of each inserted document's "_id", parallel to 'documents'.
    private ArrayList<String> ids;
    // Database handle shared by all tests; opened once in setup().
    private MongoDatabase database;

    /**
     * Init mongodb for testing: create the test user and open the shared database handle.
     */
    @BeforeAll
    public void setup() {
        createUser(DATABASE, USER, PASS);
        final MongoClient client = createMongoClient(ADMIN, USER, PASS);
        database = client.getDatabase(DATABASE);
    }

    /**
     * Resets the properties, documents, and ids before each test.
     */
    @BeforeEach
    public void closeAndResetProperties() {
        properties = new DocumentDbConnectionProperties();
        properties.setUser(USER);
        properties.setPassword(PASS);
        properties.setDatabase(DATABASE);
        properties.setTlsEnabled("false");
        properties.setHostname(HOST + ":" + getMongoPort());
        documents = new ArrayList<>();
        ids = new ArrayList<>();
    }

    /**
     * Test for basic default (random) scan: a limit of 1 yields exactly one known document.
     */
    @Test
    public void testGetIteratorBasic() throws SQLException {
        addSimpleDataToDatabase(3, "testGetIteratorBasic");
        final HashSet<BsonDocument> documentSet = new HashSet<>(documents);
        // RANDOM is the default scan method when none is set explicitly.
        Assertions.assertEquals(DocumentDbMetadataScanMethod.RANDOM, properties.getMetadataScanMethod());
        properties.setMetadataScanLimit("1");
        final MongoCollection<BsonDocument> collection = database.getCollection("testGetIteratorBasic", BsonDocument.class);
        final Iterator<BsonDocument> iterator = DocumentDbMetadataScanner.getIterator(properties, collection);
        Assertions.assertTrue(documentSet.contains(iterator.next()));
        // The limit of 1 means a second next() must fail.
        Assertions.assertThrows(NoSuchElementException.class, iterator::next);
    }

    /**
     * Test that forward id scan works and is ordered correctly (ascending "_id").
     */
    @Test
    public void testGetIteratorForward() throws SQLException {
        addSimpleDataToDatabase(10, "testGetIteratorForward");
        properties.setMetadataScanMethod(DocumentDbMetadataScanMethod.ID_FORWARD.getName());
        properties.setMetadataScanLimit("5");
        final MongoCollection<BsonDocument> collection = database.getCollection("testGetIteratorForward", BsonDocument.class);
        final Iterator<BsonDocument> iterator = DocumentDbMetadataScanner.getIterator(properties, collection);
        // Sort recorded ids ascending to compare against the forward scan order.
        Collections.sort(ids);
        for (int i = 0; i < 5; i++) {
            Assertions.assertTrue(iterator.hasNext());
            final BsonDocument document = iterator.next();
            Assertions.assertEquals(ids.get(i), document.get("_id").toString());
        }
        Assertions.assertThrows(NoSuchElementException.class, iterator::next);
    }

    /**
     * Tests that random scanning produces different iterators each time.
     *
     * NOTE: In theory could fail incorrectly, as there is a slim (<0.0001%) chance that the randomization
     * will result in identical iterators.
     */
    @Test
    public void testGetIteratorRandom() throws SQLException {
        addSimpleDataToDatabase(105, "testGetIteratorRandom");
        properties.setMetadataScanMethod(DocumentDbMetadataScanMethod.RANDOM.getName());
        properties.setMetadataScanLimit("5");
        final MongoCollection<BsonDocument> collection = database.getCollection("testGetIteratorRandom", BsonDocument.class);
        final Iterator<BsonDocument> iterator = DocumentDbMetadataScanner.getIterator(properties, collection);
        final Iterator<BsonDocument> iteratorRepeat = DocumentDbMetadataScanner.getIterator(properties, collection);
        final ArrayList<BsonDocument> firstDocumentList = new ArrayList<>();
        final ArrayList<BsonDocument> secondDocumentList = new ArrayList<>();
        // Drain both iterators in lockstep and compare the resulting sample lists.
        while (iterator.hasNext()) {
            firstDocumentList.add(iterator.next());
            secondDocumentList.add(iteratorRepeat.next());
        }
        Assertions.assertNotEquals(firstDocumentList, secondDocumentList);
    }

    /**
     * Test for all option: every inserted document is returned, then the iterator is exhausted.
     */
    @Test
    public void testGetIteratorAll() throws SQLException {
        addSimpleDataToDatabase(3, "testGetIteratorAll");
        final HashSet<BsonDocument> documentSet = new HashSet<>(documents);
        properties.setMetadataScanMethod(DocumentDbMetadataScanMethod.ALL.getName());
        final MongoCollection<BsonDocument> collection = database.getCollection("testGetIteratorAll", BsonDocument.class);
        final Iterator<BsonDocument> iterator = DocumentDbMetadataScanner.getIterator(properties, collection);
        for (int n = 0; n < 3; n++) {
            Assertions.assertTrue(documentSet.contains(iterator.next()));
        }
        Assertions.assertThrows(NoSuchElementException.class, iterator::next);
    }

    /**
     * Tests get iterator with reverse id order (descending "_id").
     */
    @Test
    public void testGetIteratorReverse() throws SQLException {
        addSimpleDataToDatabase(5, "testGetIteratorReverse");
        properties.setMetadataScanLimit("5");
        properties.setMetadataScanMethod(DocumentDbMetadataScanMethod.ID_REVERSE.getName());
        final MongoCollection<BsonDocument> collection = database.getCollection("testGetIteratorReverse", BsonDocument.class);
        final Iterator<BsonDocument> iterator = DocumentDbMetadataScanner.getIterator(properties, collection);
        // Sort ids ascending, then walk them backwards to match the reverse scan.
        Collections.sort(ids);
        for (int i = 4; i >= 0; i--) {
            Assertions.assertTrue(iterator.hasNext());
            final BsonDocument document = iterator.next();
            Assertions.assertEquals(ids.get(i), document.get("_id").toString());
        }
        Assertions.assertThrows(NoSuchElementException.class, iterator::next);
    }

    /**
     * Prepares data for a given database and collection, recording each document and its
     * "_id" in the shared {@code documents}/{@code ids} fields for later assertions.
     *
     * @param recordCount - the number of records to insert data into.
     * @param collectionName - the collection to insert the records into.
     */
    protected void addSimpleDataToDatabase(final int recordCount, final String collectionName) {
        final MongoCollection<BsonDocument> collection = database
                .getCollection(collectionName, BsonDocument.class);
        for (int count = 0; count < recordCount; count++) {
            final BsonDocument document = new BsonDocument()
                    .append("_id", new BsonObjectId())
                    .append("count", new BsonInt32(count));
            documents.add(document);
            ids.add(document.get("_id").toString());
        }
        collection.insertMany(documents);
        Assertions.assertEquals(recordCount, collection.countDocuments());
    }
}
4,549
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/metadata/DocumentDbSchemaTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.metadata; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.MapperFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.json.JsonMapper; import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.ser.std.StdSerializer; import com.fasterxml.jackson.databind.util.StdDateFormat; import com.fasterxml.jackson.datatype.guava.GuavaModule; import org.bson.BsonBinary; import org.bson.BsonBoolean; import org.bson.BsonDateTime; import org.bson.BsonDecimal128; import org.bson.BsonDocument; import org.bson.BsonDouble; import org.bson.BsonInt32; import org.bson.BsonInt64; import org.bson.BsonMaxKey; import org.bson.BsonMinKey; import org.bson.BsonNull; import org.bson.BsonObjectId; import org.bson.BsonString; import org.bson.types.Decimal128; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import java.io.IOException; import 
java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.DEFAULT_SCHEMA_NAME;

/**
 * Tests JSON serialization/deserialization of {@link DocumentDbSchema} and the
 * behavior of its {@code equals} contract.
 */
class DocumentDbSchemaTest {
    private static final String COLLECTION_NAME =
            DocumentDbTableSchemaGeneratorTest.class.getSimpleName();

    // Object mapper configured to match the schema persistence format.
    // NOTE(review): serializationInclusion keeps only the last value, so the
    // NON_EMPTY call below overrides NON_NULL -- confirm NON_NULL is intentional.
    private static final ObjectMapper OBJECT_MAPPER = JsonMapper.builder()
            .serializationInclusion(Include.NON_NULL)
            .serializationInclusion(Include.NON_EMPTY)
            .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
            .defaultDateFormat(new StdDateFormat().withColonInTimeZone(true))
            // Enable fail on unknown properties to ensure exact interface match
            .enable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
            .enable(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT)
            // Make the enums lower case.
            .enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS)
            .addModule(buildEnumLowerCaseSerializerModule())
            .addModule(new GuavaModule())
            .build();

    /** Builds a Jackson module that serializes every enum value in lower case. */
    private static SimpleModule buildEnumLowerCaseSerializerModule() {
        final SimpleModule module = new SimpleModule();
        final JsonSerializer<Enum> serializer = new StdSerializer<Enum>(Enum.class) {
            @Override
            public void serialize(final Enum value, final JsonGenerator jGen,
                    final SerializerProvider provider) throws IOException {
                jGen.writeString(value.name().toLowerCase());
            }
        };
        module.addSerializer(Enum.class, serializer);
        return module;
    }

    /**
     * Creates one sample document containing a field for each BSON type the
     * schema generator maps. Shared by the serialize/deserialize tests.
     */
    private static BsonDocument newTestDocument() {
        final long epochMillis = Instant.parse("2020-01-01T00:00:00.00Z").toEpochMilli();
        return new BsonDocument()
                .append("_id", new BsonObjectId())
                .append("fieldDecimal128",
                        new BsonDecimal128(Decimal128.parse(String.valueOf(Double.MAX_VALUE))))
                .append("fieldDouble", new BsonDouble(Double.MAX_VALUE))
                .append("fieldString", new BsonString("新年快乐"))
                .append("fieldObjectId", new BsonObjectId())
                .append("fieldBoolean", new BsonBoolean(true))
                .append("fieldDate", new BsonDateTime(epochMillis))
                .append("fieldInt", new BsonInt32(Integer.MAX_VALUE))
                .append("fieldLong", new BsonInt64(Long.MAX_VALUE))
                .append("fieldMaxKey", new BsonMaxKey())
                .append("fieldMinKey", new BsonMinKey())
                .append("fieldNull", new BsonNull())
                .append("fieldBinary", new BsonBinary(new byte[]{0, 1, 2}));
    }

    @DisplayName("Tests deserialization of schema.")
    @Test
    void testDeserialize() throws JsonProcessingException, DocumentDbSchemaException {
        final List<BsonDocument> documents = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            Assertions.assertTrue(documents.add(newTestDocument()));
        }

        // Discover the collection metadata.
        final Map<String, DocumentDbSchemaTable> tableMap = DocumentDbTableSchemaGenerator
                .generate(COLLECTION_NAME, documents.iterator());
        final DocumentDbSchema original = new DocumentDbSchema("testDb", 1, tableMap);

        // Serialize/deserialize the object.
        final String json = OBJECT_MAPPER.writeValueAsString(original);
        final DocumentDbSchema roundTripped = OBJECT_MAPPER.readValue(json, DocumentDbSchema.class);
        Assertions.assertNotNull(roundTripped);

        // Use the original collection to lazy load the tables; table ids have the
        // form "<collectionName>::<uuid>", so split on "::" to recover the name.
        roundTripped.setGetTableFunction(
                tableId -> original.getTableMap().get(tableId.split("[:][:]")[0]),
                remaining -> remaining.stream()
                        .collect(Collectors.toMap(
                                tableId -> tableId,
                                tableId -> original.getTableMap().get(tableId),
                                (a, b) -> b,
                                LinkedHashMap::new)));
        Assertions.assertEquals(1, roundTripped.getTableMap().size());
        Assertions.assertEquals(original.getTableMap().get(COLLECTION_NAME),
                roundTripped.getTableMap().get(COLLECTION_NAME));
        Assertions.assertEquals(original, roundTripped);

        // Performs a member-wise check of every table schema.
        for (DocumentDbSchemaTable tableSchema : original.getTableMap().values()) {
            final String tableJson = OBJECT_MAPPER.writeValueAsString(tableSchema);
            final DocumentDbSchemaTable deserializedTableSchema = OBJECT_MAPPER.readValue(
                    tableJson, DocumentDbSchemaTable.class);
            // Note this is reversed because deserializedTableSchema is of type
            // DocumentDbSchemaTable but tableSchema is of type DocumentDbMetadataTable.
            Assertions.assertEquals(deserializedTableSchema, tableSchema);
        }
    }

    @DisplayName("Tests serialization of schema.")
    @Test
    void testSerialize() throws JsonProcessingException {
        final List<BsonDocument> documents = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            Assertions.assertTrue(documents.add(newTestDocument()));
        }

        // Discover the collection metadata.
        final Map<String, DocumentDbSchemaTable> tableMap = DocumentDbTableSchemaGenerator
                .generate(COLLECTION_NAME, documents.iterator());
        final DocumentDbSchema schema = new DocumentDbSchema("testDb", 1, tableMap);

        // The serialized schema document must contain exactly these keys.
        @SuppressWarnings("unchecked")
        final Map<String, Object> schemaMap = OBJECT_MAPPER.convertValue(schema, Map.class);
        final List<String> keys = Arrays.asList(
                "schemaName", "sqlName", "schemaVersion", "modifyDate", "tables");
        Assertions.assertTrue(schemaMap.keySet().containsAll(keys));
        Assertions.assertTrue(keys.containsAll(schemaMap.keySet()));
        Assertions.assertEquals(DEFAULT_SCHEMA_NAME, schemaMap.get("schemaName"));
        Assertions.assertEquals("testDb", schemaMap.get("sqlName"));
        Assertions.assertEquals(1, schemaMap.get("schemaVersion"));
        Assertions.assertTrue(schemaMap.get("modifyDate") instanceof String);
        Assertions.assertTrue(schemaMap.get("tables") instanceof List<?>);

        // Tables serialize as a list of "<collectionName>::<uuid>" identifiers.
        @SuppressWarnings("unchecked")
        final List<String> tables = (List<String>) schemaMap.get("tables");
        Assertions.assertEquals(1, tables.size());
        final String tableId = tables.get(0);
        Assertions.assertTrue(tableId.startsWith(COLLECTION_NAME + "::"));

        for (DocumentDbSchemaTable table : schema.getTableMap().values()) {
            // The serialized table document must contain exactly these keys.
            final List<String> tableKeys = Arrays.asList(
                    "uuid", "sqlName", "collectionName", "modifyDate", "columns", "_id");
            @SuppressWarnings("unchecked")
            final Map<String, Object> serializedTable = OBJECT_MAPPER.convertValue(
                    table, Map.class);
            Assertions.assertTrue(serializedTable.keySet().containsAll(tableKeys));
            Assertions.assertTrue(tableKeys.containsAll(serializedTable.keySet()));
            Assertions.assertEquals(tableId, serializedTable.get("_id"));
            Assertions.assertEquals(tableId.split("[:][:]")[1], serializedTable.get("uuid"));
            Assertions.assertEquals(COLLECTION_NAME, serializedTable.get("sqlName"));
            Assertions.assertEquals(COLLECTION_NAME, serializedTable.get("collectionName"));
            Assertions.assertTrue(serializedTable.get("columns") instanceof List<?>);
            @SuppressWarnings("unchecked")
            final List<DocumentDbSchemaColumn> columns =
                    (List<DocumentDbSchemaColumn>) serializedTable.get("columns");
            // One column per field in the sample document.
            Assertions.assertEquals(13, columns.size());
        }
    }

    @DisplayName("Tests equals() method with different combinations.")
    @Test
    void testEquals() {
        final Date date = new Date(100);
        final Date otherDate = new Date(200);
        final Set<String> tables = new LinkedHashSet<>();
        tables.add("table");
        final DocumentDbSchema schema1 = new DocumentDbSchema("_default", 1, "testDb", date, null);
        final DocumentDbSchema schema2 = new DocumentDbSchema("_default", 1, "testDb", date, null);
        final DocumentDbSchema schema3 = new DocumentDbSchema("_default", 2, "testDb", date, null);
        final DocumentDbSchema schema4 = new DocumentDbSchema("_other", 1, "testDb", date, null);
        final DocumentDbSchema schema5 = new DocumentDbSchema("_default", 1, "otherTestDb", date, null);
        final DocumentDbSchema schema6 = new DocumentDbSchema("_default", 1, "testDb", otherDate, null);
        final DocumentDbSchema schema7 = new DocumentDbSchema("_default", 1, "testDb", date, tables);

        // Reflexive and member-wise equality.
        Assertions.assertTrue(schema1.equals(schema1));
        Assertions.assertTrue(schema1.equals(schema2));
        // Any single differing member (version, name, database, date, tables)
        // breaks equality, as does comparing against an unrelated type.
        Assertions.assertFalse(schema1.equals(schema3));
        Assertions.assertFalse(schema1.equals(schema4));
        Assertions.assertFalse(schema1.equals(schema5));
        Assertions.assertFalse(schema1.equals(schema6));
        Assertions.assertFalse(schema1.equals(schema7));
        Assertions.assertFalse(schema1.equals(new Object()));
    }
}
4,550
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/metadata/DocumentDbTableSchemaGeneratorVirtualTableTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.metadata;

import org.bson.BsonDocument;
import org.bson.BsonType;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import software.amazon.documentdb.jdbc.common.utilities.JdbcType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.combinePath;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.toName;

/**
 * These tests check that the correct virtual tables are generated given documents with varying nested structures.
 *
 * <p>Each nested document or array in a collection document becomes a separate "virtual"
 * table keyed back to the base table by the {@code <collection>__id} foreign-key column;
 * array tables additionally get generated {@code index_lvl_<n>} primary-key columns.
 */
public class DocumentDbTableSchemaGeneratorVirtualTableTest extends DocumentDbTableSchemaGeneratorTest {
    /**
     * Tests a two-level document.
     */
    @DisplayName("Tests a two-level document.")
    @Test
    void testComplexTwoLevelDocument() {
        final Map<String, String> tableNameMap = new HashMap<>();
        final BsonDocument document = BsonDocument.parse(
                "{ \"_id\" : \"key\", \"doc\" : { \"field\" : 1 } }");
        final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator
                .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator());
        // One base table plus one virtual table for the nested "doc" document.
        Assertions.assertNotNull(metadata);
        Assertions.assertEquals(2, metadata.size());
        final DocumentDbMetadataTable baseTable = (DocumentDbMetadataTable) metadata
                .get(COLLECTION_NAME);
        Assertions.assertNotNull(baseTable);
        Assertions.assertEquals(1, baseTable.getColumnMap().size());
        // Base table: "_id" becomes the single-column primary key.
        DocumentDbSchemaColumn schemaColumn = baseTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(schemaColumn);
        Assertions.assertEquals(1, schemaColumn.getIndex(baseTable).orElse(null));
        Assertions.assertEquals(1, schemaColumn.getPrimaryKeyIndex(baseTable).orElse(null));
        Assertions.assertNull(schemaColumn.getForeignKeyIndex(baseTable).orElse(null));
        DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) schemaColumn;
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertNull(metadataColumn.getForeignKeyTableName());
        Assertions.assertNull(metadataColumn.getForeignKeyColumnName());
        Assertions.assertFalse(metadataColumn.isGenerated());

        // Virtual table for document with name "doc"
        final DocumentDbMetadataTable virtualTable = (DocumentDbMetadataTable) metadata
                .get(toName(combinePath(COLLECTION_NAME, "doc"), tableNameMap));
        Assertions.assertEquals(2, virtualTable.getColumnMap().size());

        // _id foreign key column referencing the base table's primary key.
        schemaColumn = virtualTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(schemaColumn);
        Assertions.assertEquals(1, schemaColumn.getIndex(virtualTable).orElse(null));
        Assertions.assertEquals(1, schemaColumn.getPrimaryKeyIndex(virtualTable).orElse(null));
        Assertions.assertEquals(1, schemaColumn.getForeignKeyIndex(baseTable).orElse(null));
        metadataColumn = (DocumentDbMetadataColumn) schemaColumn;
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex());
        Assertions.assertEquals(COLLECTION_NAME, metadataColumn.getForeignKeyTableName());
        Assertions.assertEquals(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getForeignKeyColumnName());
        Assertions.assertFalse(metadataColumn.isGenerated());

        // The nested document's scalar "field" becomes a plain (non-key) column.
        schemaColumn = virtualTable.getColumnMap().get("field");
        Assertions.assertNotNull(schemaColumn);
        Assertions.assertEquals(2, schemaColumn.getIndex(virtualTable).orElse(null));
        Assertions.assertEquals(0, schemaColumn.getPrimaryKeyIndex(virtualTable).orElse(null));
        Assertions.assertNull(schemaColumn.getForeignKeyIndex(baseTable).orElse(null));
        metadataColumn = (DocumentDbMetadataColumn) schemaColumn;
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType());
        Assertions.assertEquals(combinePath("doc", "field"), metadataColumn.getFieldPath());
        Assertions.assertEquals("field",
                metadataColumn.getSqlName());
        Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isPrimaryKey());
        Assertions.assertFalse(metadataColumn.isIndex());
        Assertions.assertNull(metadataColumn.getForeignKeyTableName());
        Assertions.assertNull(metadataColumn.getForeignKeyColumnName());
        Assertions.assertFalse(metadataColumn.isGenerated());

        printMetadataOutput(metadata, getMethodName());
    }

    /**
     * Tests a three-level document.
     */
    @DisplayName("Tests a three-level document.")
    @Test
    void testComplexThreeLevelDocument() {
        final Map<String, String> tableNameMap = new HashMap<>();
        final BsonDocument document = BsonDocument.parse(
                "{ \"_id\" : \"key\", \"doc\" : { \"field\" : 1, \"doc2\" : { \"field2\" : \"value\" } } }");
        final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator
                .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator());
        // One base table plus one virtual table per nesting level ("doc", "doc.doc2").
        Assertions.assertNotNull(metadata);
        Assertions.assertEquals(3, metadata.size());
        DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get(COLLECTION_NAME);
        Assertions.assertNotNull(metadataTable);
        Assertions.assertEquals(1, metadataTable.getColumnMap().size());
        DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());

        // Virtual table for document with name "doc"
        metadataTable = (DocumentDbMetadataTable) metadata.get(
                toName(combinePath(COLLECTION_NAME, "doc"), tableNameMap));
        Assertions.assertEquals(2, metadataTable.getColumnMap().size());
        // _id foreign key column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex());
        Assertions.assertEquals(COLLECTION_NAME, metadataColumn.getForeignKeyTableName());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getForeignKeyColumnName());
        Assertions.assertFalse(metadataColumn.isGenerated());

        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field");
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType());
        Assertions.assertEquals(combinePath("doc", "field"), metadataColumn.getFieldPath());
        Assertions.assertEquals("field", metadataColumn.getSqlName());
        Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertNull(metadataColumn.getForeignKeyTableName());
        Assertions.assertNull(metadataColumn.getForeignKeyColumnName());
        Assertions.assertFalse(metadataColumn.isPrimaryKey());
        Assertions.assertFalse(metadataColumn.isIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());

        // Virtual table for document with name "doc2" (nested inside "doc").
        final String parentPath = "doc";
        metadataTable = (DocumentDbMetadataTable) metadata.get(
                toName(combinePath(combinePath(COLLECTION_NAME, parentPath), "doc2"), tableNameMap));
        Assertions.assertEquals(2, metadataTable.getColumnMap().size());
        // _id foreign key column -- still references the base table directly.
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex());
        Assertions.assertEquals(COLLECTION_NAME, metadataColumn.getForeignKeyTableName());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getForeignKeyColumnName());
        Assertions.assertFalse(metadataColumn.isGenerated());

        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field2");
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals(
                combinePath(combinePath(parentPath, "doc2"), "field2"),
                metadataColumn.getFieldPath());
        Assertions.assertEquals("field2", metadataColumn.getSqlName());
        Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());

        printMetadataOutput(metadata, getMethodName());
    }

    /**
     * Tests a single-level array as virtual table.
     */
    @DisplayName("Tests a single-level array as virtual table.")
    @Test
    void testComplexSingleLevelArray() {
        final Map<String, String> tableNameMap = new HashMap<>();
        final BsonDocument document = BsonDocument.parse(
                "{ \"_id\" : \"key\", \"array\" : [ 1, 2, 3 ] }");
        final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator
                .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator());
        // One base table plus one virtual table for the "array" field.
        Assertions.assertNotNull(metadata);
        Assertions.assertEquals(2, metadata.size());
        DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get(COLLECTION_NAME);
        Assertions.assertNotNull(metadataTable);
        Assertions.assertEquals(1, metadataTable.getColumnMap().size());
        DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());

        // Virtual table for the array field "array": _id FK + generated index + value.
        metadataTable = (DocumentDbMetadataTable) metadata.get(
                toName(combinePath(COLLECTION_NAME, "array"), tableNameMap));
        Assertions.assertEquals(3, metadataTable.getColumnMap().size());
        // _id foreign key column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());
        Assertions.assertEquals(COLLECTION_NAME, metadataColumn.getForeignKeyTableName());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getForeignKeyColumnName());
        // index key column -- generated array position, part of the primary key.
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath("array", "index_lvl_0"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType());
        Assertions.assertEquals("array", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath("array", "index_lvl_0"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(2, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertTrue(metadataColumn.isGenerated());
        Assertions.assertNull(metadataColumn.getForeignKeyColumnName());
        Assertions.assertNull(metadataColumn.getForeignKeyTableName());
        // value key column -- holds the scalar array element.
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("value");
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType());
        Assertions.assertEquals("array", metadataColumn.getFieldPath());
        Assertions.assertEquals("value", metadataColumn.getSqlName());
        Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertFalse(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertNull(metadataColumn.getForeignKeyColumnName());
        Assertions.assertNull(metadataColumn.getForeignKeyTableName());
        Assertions.assertFalse(metadataColumn.isGenerated());

        printMetadataOutput(metadata, getMethodName());
    }

    /**
     * Tests a two-level array as virtual table.
     */
    @DisplayName("Tests a two-level array as virtual table.")
    @Test
    void testComplexTwoLevelArray() {
        final Map<String, String> tableNameMap = new HashMap<>();
        final BsonDocument document = BsonDocument.parse(
                "{ \"_id\" : \"key\", \"array\" : [ [1, 2, 3 ], [ 4, 5, 6 ], [7, 8, 9 ] ]}");
        final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator
                .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator());
        // Nested arrays flatten into a single virtual table with one index column per level.
        Assertions.assertNotNull(metadata);
        Assertions.assertEquals(2, metadata.size());
        DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get(COLLECTION_NAME);
        Assertions.assertNotNull(metadataTable);
        Assertions.assertEquals(1, metadataTable.getColumnMap().size());
        DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());

        // Virtual table for the array field "array": _id FK + two index levels + value.
        metadataTable = (DocumentDbMetadataTable) metadata.get(
                toName(combinePath(COLLECTION_NAME, "array"), tableNameMap));
        Assertions.assertEquals(4, metadataTable.getColumnMap().size());
        // _id foreign key column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex());
        Assertions.assertEquals(COLLECTION_NAME, metadataColumn.getForeignKeyTableName());
        Assertions.assertEquals(COLLECTION_NAME + "__id", metadataColumn.getForeignKeyColumnName());
        Assertions.assertFalse(metadataColumn.isGenerated());
        // index key column (outer array level)
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath("array", "index_lvl_0"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType());
        Assertions.assertEquals("array", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath("array", "index_lvl_0"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(2, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertNull(metadataColumn.getForeignKeyColumnName());
        Assertions.assertNull(metadataColumn.getForeignKeyTableName());
        Assertions.assertTrue(metadataColumn.isGenerated());
        // index key column (inner array level)
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath("array", "index_lvl_1"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType());
        Assertions.assertEquals("array", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath("array", "index_lvl_1"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(3, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertNull(metadataColumn.getForeignKeyColumnName());
        Assertions.assertNull(metadataColumn.getForeignKeyTableName());
        Assertions.assertTrue(metadataColumn.isGenerated());
        // value key column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("value");
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType());
        Assertions.assertEquals("array", metadataColumn.getFieldPath());
        Assertions.assertEquals("value", metadataColumn.getSqlName());
        Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertFalse(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertNull(metadataColumn.getForeignKeyColumnName());
        Assertions.assertNull(metadataColumn.getForeignKeyTableName());
        Assertions.assertFalse(metadataColumn.isGenerated());

        printMetadataOutput(metadata, getMethodName());
    }

    /**
     * Tests an array of documents where one element contains a nested array
     * ("array" of documents with "array2" inside an element).
     */
    @Test
    void testComplexSingleLevelWithDocumentsWithArray() {
        final Map<String, String> tableNameMap = new HashMap<>();
        final BsonDocument document = BsonDocument.parse(
                "{ \"_id\" : \"key\", \"array\" : [ { \"field\" : 1, \"field1\": \"value\" }, { \"field\" : 2, \"array2\" : [ \"a\", \"b\", \"c\" ] } ]}");
        final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator
                .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator());
        // Base table plus virtual tables for "array" and the nested "array.array2".
        Assertions.assertNotNull(metadata);
        Assertions.assertEquals(3, metadata.size());
        DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get(COLLECTION_NAME);
        Assertions.assertNotNull(metadataTable);
        Assertions.assertEquals(1, metadataTable.getColumnMap().size());
        DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap()
                .get(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());

        // Virtual table for array with name "array"
        metadataTable = (DocumentDbMetadataTable) metadata.get(toName(combinePath(
                COLLECTION_NAME, "array"), tableNameMap));
        Assertions.assertEquals(4, metadataTable.getColumnMap().size());
        // _id foreign key column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());
        // index key column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath("array", "index_lvl_0"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType());
        Assertions.assertEquals("array", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath("array", "index_lvl_0"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(2, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertTrue(metadataColumn.isPrimaryKey());
        Assertions.assertTrue(metadataColumn.isIndex());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertTrue(metadataColumn.isGenerated());
        // document column -- "field" is merged from both array elements.
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field");
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType());
        Assertions.assertEquals(combinePath("array", "field"), metadataColumn.getFieldPath());
        Assertions.assertEquals("field", metadataColumn.getSqlName());
        Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertFalse(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());
        // document column -- "field1" appears only in the first element.
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field1");
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals(combinePath("array", "field1"), metadataColumn.getFieldPath());
        Assertions.assertEquals("field1", metadataColumn.getSqlName());
        Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertFalse(metadataColumn.isPrimaryKey());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());

        // Virtual table for array in array
        metadataTable = (DocumentDbMetadataTable) metadata.get(toName(combinePath(combinePath(
                COLLECTION_NAME, "array"), "array2"), tableNameMap));
        Assertions.assertEquals(4, metadataTable.getColumnMap().size());
        // _id foreign key column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());
        // index key column inherited from the parent array table.
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath("array", "index_lvl_0"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType());
        Assertions.assertEquals("array", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath("array", "index_lvl_0"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(2, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(2, metadataColumn.getForeignKeyIndex());
        Assertions.assertTrue(metadataColumn.isGenerated());
        // index key column for the nested array itself.
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath("array_array2", "index_lvl_0"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType());
        Assertions.assertEquals("array.array2", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath("array_array2", "index_lvl_0"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(3, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertTrue(metadataColumn.isGenerated());
        // value column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("value");
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals(combinePath("array", "array2"),
                metadataColumn.getFieldPath());
        Assertions.assertEquals("value", metadataColumn.getSqlName());
        Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());

        printMetadataOutput(metadata, getMethodName());
    }

    /**
     * Tests a two-level array as virtual table with multiple documents.
     */
    @DisplayName("Tests a two-level array as virtual table with multiple documents.")
    @Test
    void testComplexSingleLevelArrayWithDocuments() {
        final Map<String, String> tableNameMap = new HashMap<>();
        final BsonDocument document = BsonDocument.parse(
                "{ \"_id\" : \"key\", \"array\" : [ { \"field\" : 1, \"field1\": \"value\" }, { \"field\" : 2, \"field2\" : \"value\" } ]}");
        final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator
                .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator());
        // One base table plus one virtual table for the "array" of documents.
        Assertions.assertNotNull(metadata);
        Assertions.assertEquals(2, metadata.size());
        DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata
                .get(COLLECTION_NAME);
        Assertions.assertNotNull(metadataTable);
        Assertions.assertEquals(1, metadataTable.getColumnMap().size());
        DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap()
                .get(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());

        // Virtual table for the array field "array" -- fields of both elements merged.
        metadataTable = (DocumentDbMetadataTable) metadata.get(toName(combinePath(
                COLLECTION_NAME, "array"), tableNameMap));
        Assertions.assertEquals(5, metadataTable.getColumnMap().size());
        // _id foreign key column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType());
        Assertions.assertEquals("_id", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());
        // index key column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get(
                toName(combinePath("array", "index_lvl_0"), tableNameMap));
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType());
        Assertions.assertEquals("array", metadataColumn.getFieldPath());
        Assertions.assertEquals(
                toName(combinePath("array", "index_lvl_0"), tableNameMap),
                metadataColumn.getSqlName());
        Assertions.assertEquals(2, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertTrue(metadataColumn.isGenerated());
        // document column
        metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field");
        Assertions.assertNotNull(metadataColumn);
        Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType());
        Assertions.assertEquals(combinePath("array", "field"), metadataColumn.getFieldPath());
        Assertions.assertEquals("field", metadataColumn.getSqlName());
        Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex());
        Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex());
        Assertions.assertFalse(metadataColumn.isGenerated());
        // document column
        metadataColumn = (DocumentDbMetadataColumn)
metadataTable.getColumnMap().get("field1"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals(combinePath("array", "field1"), metadataColumn.getFieldPath()); Assertions.assertEquals("field1", metadataColumn.getSqlName()); Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertFalse(metadataColumn.isGenerated()); // document column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field2"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals(combinePath("array", "field2"), metadataColumn.getFieldPath()); Assertions.assertEquals("field2", metadataColumn.getSqlName()); Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertFalse(metadataColumn.isGenerated()); printMetadataOutput(metadata, getMethodName()); } /** * Tests a three-level document with multiple documents. 
*/ @DisplayName("Tests a three-level document with multiple documents.") @Test void testComplexThreeLevelMultipleDocuments() { final Map<String, String> tableNameMap = new HashMap<>(); final List<BsonDocument> documents = new ArrayList<>(); BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \n" + " \"doc\" : { \n" + " \"field\" : 1, \n" + " \"doc2\" : { \n" + " \"field1\" : \"value\" \n" + " } \n" + "} \n" + "}" ); documents.add(document); document = BsonDocument.parse( "{ \"_id\" : \"key\", \n" + " \"doc\" : { \n" + " \"field\" : 1, \n" + " \"doc2\" : { \n" + " \"field2\" : \"value\" \n" + " } \n" + " } \n" + "}" ); documents.add(document); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, documents.iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(3, metadata.size()); DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata .get(COLLECTION_NAME); Assertions.assertNotNull(metadataTable); Assertions.assertEquals(1, metadataTable.getColumnMap().size()); DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals("_id", metadataColumn.getFieldPath()); Assertions.assertEquals(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertFalse(metadataColumn.isGenerated()); // Virtual table for document with name "doc" metadataTable = (DocumentDbMetadataTable) metadata.get( toName(combinePath(COLLECTION_NAME, "doc"), tableNameMap)); Assertions.assertEquals(2, metadataTable.getColumnMap().size()); // _id foreign key column metadataColumn = 
(DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals("_id", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex()); Assertions.assertFalse(metadataColumn.isGenerated()); metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType()); Assertions.assertEquals(combinePath("doc", "field"), metadataColumn.getFieldPath()); Assertions.assertEquals("field", metadataColumn.getSqlName()); Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertFalse(metadataColumn.isGenerated()); // Virtual table for document with name "doc2" final String parentPath = "doc"; metadataTable = (DocumentDbMetadataTable) metadata.get( toName(combinePath(combinePath(COLLECTION_NAME, parentPath), "doc2"), tableNameMap)); Assertions.assertEquals(3, metadataTable.getColumnMap().size()); // _id foreign key column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals("_id", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(1, 
metadataColumn.getForeignKeyIndex()); Assertions.assertFalse(metadataColumn.isGenerated()); metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field1"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals( combinePath(combinePath(parentPath, "doc2"), "field1"), metadataColumn.getFieldPath()); Assertions.assertEquals("field1", metadataColumn.getSqlName()); Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertFalse(metadataColumn.isGenerated()); metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field2"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals( combinePath(combinePath(parentPath, "doc2"), "field2"), metadataColumn.getFieldPath()); Assertions.assertEquals("field2", metadataColumn.getSqlName()); Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertFalse(metadataColumn.isGenerated()); printMetadataOutput(metadata, getMethodName()); } @Test void testDeepStructuredData() { final Map<String, String> tableNameMap = new HashMap<>(); final String json = "{\n" + " \"_id\" : \"60a2c0c65be86c8f6a007514\",\n" + " \"field\" : \"string\",\n" + " \"count\" : 19,\n" + " \"timestamp\" : \"2021-05-17T19:15:18.316Z\",\n" + " \"subDocument\" : {\n" + " \"field\" : \"ABC\",\n" + " \"field2\" : [\n" + " \"A\",\n" + " \"B\",\n" + " \"C\"\n" + " ]\n" + " },\n" + " \"twoLevelArray\" : [\n" + " [\n" + " 1,\n" + " 2\n" + " ],\n" + " [\n" + " 3,\n" + " 4\n" + " ],\n" + " [\n" + " 5,\n" + " 6\n" + " ]\n" + " ],\n" + " \"nestedArray\" : [\n" + " {\n" + " \"document\" : 0,\n" + " \"innerArray\" : [\n" + " 1,\n" + " 2,\n" + " 3\n" + " ]\n" + " },\n" + " {\n" + " \"document\" : 
1,\n" + " \"innerArray\" : [\n" + " 1,\n" + " 2,\n" + " 3\n" + " ]\n" + " },\n" + " {\n" + " \"document\" : 2,\n" + " \"innerArray\" : [\n" + " 1,\n" + " 2,\n" + " 3\n" + " ]\n" + " }\n" + " ],\n" + " \"nestedSubDocument\" : {\n" + " \"field\" : 0,\n" + " \"subDoc0\" : {\n" + " \"field\" : 1,\n" + " \"subDoc1\" : {\n" + " \"field\" : 2\n" + " }\n" + " }\n" + " }\n" + "}\n"; final BsonDocument document = BsonDocument.parse(json); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, Collections.singleton(document).iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(9, metadata.size()); final DocumentDbSchemaTable baseTable = metadata.get(COLLECTION_NAME); Assertions.assertNotNull(baseTable); Assertions.assertEquals(4, baseTable.getColumns().size()); final DocumentDbSchemaTable subDocument = metadata .get(toName(combinePath(COLLECTION_NAME, "subDocument"), tableNameMap)); Assertions.assertNotNull(subDocument); Assertions.assertEquals(2, subDocument.getColumns().size()); final DocumentDbSchemaTable subDocumentField2 = metadata .get(toName(combinePath(combinePath( COLLECTION_NAME, "subDocument"), "field2"), tableNameMap)); Assertions.assertNotNull(subDocumentField2); Assertions.assertEquals(3, subDocumentField2.getColumns().size()); final DocumentDbSchemaTable twoLevelArray = metadata .get(toName(combinePath( COLLECTION_NAME, "twoLevelArray"), tableNameMap)); Assertions.assertNotNull(twoLevelArray); Assertions.assertEquals(4, twoLevelArray.getColumns().size()); final DocumentDbSchemaTable nestedArray = metadata .get(toName(combinePath( COLLECTION_NAME, "nestedArray"), tableNameMap)); Assertions.assertNotNull(nestedArray); Assertions.assertEquals(3, nestedArray.getColumns().size()); final DocumentDbSchemaTable nestedArrayInnerArray = metadata .get(toName(combinePath(combinePath( COLLECTION_NAME, "nestedArray"), "innerArray"), tableNameMap)); Assertions.assertNotNull(nestedArrayInnerArray); 
Assertions.assertEquals(4, nestedArrayInnerArray.getColumns().size()); final DocumentDbSchemaTable nestedSubDocument = metadata .get(toName(combinePath( COLLECTION_NAME, "nestedSubDocument"), tableNameMap)); Assertions.assertNotNull(nestedSubDocument); Assertions.assertEquals(2, nestedSubDocument.getColumns().size()); final DocumentDbSchemaTable subDoc0 = metadata .get(toName(combinePath(combinePath( COLLECTION_NAME, "nestedSubDocument"), "subDoc0"), tableNameMap)); Assertions.assertNotNull(subDoc0); final DocumentDbSchemaTable subDoc1 = metadata .get(toName(combinePath(combinePath(combinePath( COLLECTION_NAME, "nestedSubDocument"), "subDoc0"), "subDoc1"), tableNameMap)); Assertions.assertNotNull(subDoc1); } @DisplayName("Test whether a sub-document with '_id' as a field is handled correctly.") @Test void testNestedDocumentWithIdInSubDocument() { final String nestedJson = "{\n" + " \"_id\": { \"$oid\": \"607d96b40352ee001f493a73\" },\n" + " \"language\": \"en\",\n" + " \"tags\": [],\n" + " \"title\": \"TT Eval\",\n" + " \"name\": \"tt_eval\",\n" + " \"type\": \"form\",\n" + " \"created\": \"2021-04-19T14:41:56.252Z\",\n" + " \"modified\": \"2021-04-19T14:41:56.252Z\",\n" + " \"owner\": \"12345\",\n" + " \"components\": [\n" + " {\n" + " \"_id\": { \"$oid\": \"607d96b40352ee001f493aca\" },\n" + " \"label\": \"Objective\",\n" + " \"required\": false,\n" + " \"tooltip\": \"additional note go here to describe context of question 54\",\n" + " \"name\": \"tteval-54\",\n" + " \"type\": \"section\",\n" + " \"components\": [\n" + " {\n" + " \"_id\": { \"$oid\": \"607d96b50352ee001f493bdc\" },\n" + " \"label\": \"Strength/ROM\",\n" + " \"required\": false,\n" + " \"tooltip\": \"additional note go here to describe context of question 221\",\n" + " \"name\": \"tteval-221\",\n" + " \"type\": \"section\",\n" + " }\n" + " ]\n" + " }\n" + " ],\n" + " \"__v\": { \"$numberInt\": \"0\" }\n" + "}\n"; final BsonDocument document = BsonDocument.parse(nestedJson); final Map<String, 
DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, Collections.singleton(document).iterator()); Assertions.assertNotNull(metadata); } @DisplayName("Tests when some arrays are empty") @Test void testEmptyNonEmptyObjectArray() { final Map<String, String> tableNameMap = new HashMap<>(); final String nestedJson = "{\n" + " \"_id\": { \"$oid\": \"607d96b40352ee001f493a73\" },\n" + " \"language\": \"en\",\n" + " \"tags\": [],\n" + " \"title\": \"TT Eval\",\n" + " \"name\": \"tt_eval\",\n" + " \"type\": \"form\",\n" + " \"created\": \"2021-04-19T14:41:56.252Z\",\n" + " \"modified\": \"2021-04-19T14:41:56.252Z\",\n" + " \"owner\": \"12345\",\n" + " \"array\": [\n" + " {\n" + " \"components\": []\n" + " },\n" + " {\n" + " \"components\": [\n" + " {\n" + " \"_id\": { \"$oid\": \"607d96b40352ee001f493acb\" },\n" + " \"label\": \"Objective b\",\n" + " \"required\": false,\n" + " \"tooltip\": \"additional note go here to describe context of question b\",\n" + " \"name\": \"tteval-b\",\n" + " \"type\": \"section\"\n" + " }\n" + " ]\n" + " },\n" + " {\n" + " \"components\": []\n" + " }\n" + " ],\n" + " \"__v\": { \"$numberInt\": \"0\" }\n" + "}\n"; final BsonDocument document = BsonDocument.parse(nestedJson); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, Collections.singleton(document).iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(4, metadata.size()); DocumentDbSchemaTable schemaTable = metadata.get(COLLECTION_NAME); Assertions.assertNotNull(schemaTable); Assertions.assertEquals(9, schemaTable.getColumns().size()); schemaTable = metadata.get(toName(combinePath(COLLECTION_NAME, "tags"), tableNameMap)); Assertions.assertNotNull(schemaTable); Assertions.assertEquals(3, schemaTable.getColumns().size()); Assertions .assertEquals(JdbcType.NULL, schemaTable.getColumnMap().get("value").getSqlType()); schemaTable = 
metadata.get(toName(combinePath(COLLECTION_NAME, "array"), tableNameMap)); Assertions.assertNotNull(schemaTable); Assertions.assertEquals(2, schemaTable.getColumns().size()); schemaTable = metadata.get(toName(combinePath(combinePath( COLLECTION_NAME, "array"), "components"), tableNameMap)); Assertions.assertNotNull(schemaTable); Assertions.assertEquals(9, schemaTable.getColumns().size()); final DocumentDbSchemaColumn id = schemaTable.getColumnMap().get("_id"); Assertions.assertNotNull(id); Assertions.assertEquals(JdbcType.VARCHAR, id.getSqlType()); Assertions.assertEquals(BsonType.OBJECT_ID, id.getDbType()); final DocumentDbSchemaColumn required = schemaTable.getColumnMap().get("required"); Assertions.assertNotNull(required); Assertions.assertEquals(JdbcType.BOOLEAN, required.getSqlType()); Assertions.assertEquals(BsonType.BOOLEAN, required.getDbType()); Assertions.assertNull(schemaTable.getColumnMap().get("value")); } @DisplayName("Tests that sub-document of array is not present on first document, still should have index column.") @Test void testSubDocumentInArrayOnlyInSecondDocument() { final List<BsonDocument> docs = new ArrayList<>(); String documentString; documentString = "{\"_id\":{\"$oid\":\"6050ea8da110dd2c5f279bd0\"},\"data\":{\"_id\":{\"$oid\":\"6050ea8da110dd2c5f279bcf\"},\"locations\":[{\"_id\":\"06c782fe-b89e-43b1-8748-b671ad7d3ad9\"}]}}"; docs.add(BsonDocument.parse(documentString)); documentString = "{\"_id\":{\"$oid\":\"6050ea8ea110dd2c5f279bd4\"},\"data\":{\"_id\":{\"$oid\":\"6050ea8ea110dd2c5f279bd3\"},\"locations\":[{\"_id\":\"06c782fe-b89e-43b1-8748-b671ad7d3ad9\",\"nonRegisteredBranch\":{\"_id\":\"06c782fe-b89e-43b1-8748-b671ad7d3ad9\"}}]}}"; docs.add(BsonDocument.parse(documentString)); final Map<String, DocumentDbSchemaTable> tableMap = DocumentDbTableSchemaGenerator .generate("employmentHistory", docs.iterator()); Assertions.assertEquals(4, tableMap.size()); final DocumentDbSchemaTable table1 = 
tableMap.get("employmentHistory_data_locations"); Assertions.assertNotNull(table1); Assertions.assertNotNull(table1.getColumnMap().get("data_locations_index_lvl_0")); final DocumentDbSchemaTable table2 = tableMap.get( "employmentHistory_data_locations_nonRegisteredBranch"); Assertions.assertNotNull(table2); Assertions.assertNotNull(table2.getColumnMap().get("data_locations_index_lvl_0")); } @DisplayName("Tests an array with multiple documents and nulls.") @Test void testArrayWithDocumentsAndNull() { final Map<String, String> tableNameMap = new HashMap<>(); final BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ { \"field\" : 1 }, null, { \"field\": 2}, null]}"); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(2, metadata.size()); // Virtual table for array of documents final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get(toName(combinePath( COLLECTION_NAME, "array"), tableNameMap)); Assertions.assertEquals(3, metadataTable.getColumnMap().size()); // _id foreign key column DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals("_id",metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex()); Assertions.assertFalse(metadataColumn.isGenerated()); // index key column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath("array", 
"index_lvl_0"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType()); Assertions.assertEquals("array", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath("array", "index_lvl_0"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(2, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertTrue(metadataColumn.isGenerated()); // field column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType()); Assertions.assertEquals(combinePath("array", "field"), metadataColumn.getFieldPath()); Assertions.assertEquals("field", metadataColumn.getSqlName()); Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertFalse(metadataColumn.isGenerated()); printMetadataOutput(metadata, getMethodName()); } @DisplayName("Tests a two-level scalar array with nulls on both levels.") @Test void testTwoLevelArrayWithNulls() { final Map<String, String> tableNameMap = new HashMap<>(); final BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ [1, 2, 3 ], [], [ 4, 5, 6, null ], null]}"); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(2, metadata.size()); // Virtual table for 2 level nested array final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get( toName(combinePath(COLLECTION_NAME, "array"), tableNameMap)); Assertions.assertEquals(4, metadataTable.getColumnMap().size()); // _id foreign key column DocumentDbMetadataColumn metadataColumn = 
(DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals("_id", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex()); Assertions.assertTrue(metadataColumn.isPrimaryKey()); Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex()); Assertions.assertEquals(COLLECTION_NAME, metadataColumn.getForeignKeyTableName()); Assertions.assertEquals(COLLECTION_NAME + "__id", metadataColumn.getForeignKeyColumnName()); Assertions.assertFalse(metadataColumn.isGenerated()); // index key column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath("array", "index_lvl_0"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType()); Assertions.assertEquals("array", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath("array", "index_lvl_0"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(2, metadataColumn.getPrimaryKeyIndex()); Assertions.assertTrue(metadataColumn.isPrimaryKey()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertNull(metadataColumn.getForeignKeyColumnName()); Assertions.assertNull(metadataColumn.getForeignKeyTableName()); Assertions.assertTrue(metadataColumn.isGenerated()); // index key column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath("array", "index_lvl_1"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType()); Assertions.assertEquals("array", metadataColumn.getFieldPath()); 
Assertions.assertEquals( toName(combinePath("array", "index_lvl_1"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(3, metadataColumn.getPrimaryKeyIndex()); Assertions.assertTrue(metadataColumn.isPrimaryKey()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertNull(metadataColumn.getForeignKeyColumnName()); Assertions.assertNull(metadataColumn.getForeignKeyTableName()); Assertions.assertTrue(metadataColumn.isGenerated()); // value key column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("value"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType()); Assertions.assertEquals("array", metadataColumn.getFieldPath()); Assertions.assertEquals("value", metadataColumn.getSqlName()); Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex()); Assertions.assertFalse(metadataColumn.isPrimaryKey()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertNull(metadataColumn.getForeignKeyColumnName()); Assertions.assertNull(metadataColumn.getForeignKeyTableName()); Assertions.assertFalse(metadataColumn.isGenerated()); printMetadataOutput(metadata, getMethodName()); } @DisplayName("Tests a two-level array of documents with nulls in both levels.") @Test void testTwoLevelDocumentArrayWithNulls() { final Map<String, String> tableNameMap = new HashMap<>(); final BsonDocument document = BsonDocument.parse("{ \"_id\" : \"key\", \"array\" : [ [ {\"field\": 1}, {\"field\": null}, null], null]}"); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(2, metadata.size()); // Virtual table for 2 level nested array of documents final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get( 
toName(combinePath(COLLECTION_NAME, "array"), tableNameMap)); Assertions.assertEquals(4, metadataTable.getColumnMap().size()); // _id foreign key column DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals("_id", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex()); Assertions.assertTrue(metadataColumn.isPrimaryKey()); Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex()); Assertions.assertEquals(COLLECTION_NAME, metadataColumn.getForeignKeyTableName()); Assertions.assertEquals(COLLECTION_NAME + "__id", metadataColumn.getForeignKeyColumnName()); Assertions.assertFalse(metadataColumn.isGenerated()); // index key column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath("array", "index_lvl_0"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType()); Assertions.assertEquals("array", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath("array", "index_lvl_0"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(2, metadataColumn.getPrimaryKeyIndex()); Assertions.assertTrue(metadataColumn.isPrimaryKey()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertNull(metadataColumn.getForeignKeyColumnName()); Assertions.assertNull(metadataColumn.getForeignKeyTableName()); Assertions.assertTrue(metadataColumn.isGenerated()); // index key column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath("array", "index_lvl_1"), tableNameMap)); 
Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType()); Assertions.assertEquals("array", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath("array", "index_lvl_1"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(3, metadataColumn.getPrimaryKeyIndex()); Assertions.assertTrue(metadataColumn.isPrimaryKey()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertNull(metadataColumn.getForeignKeyColumnName()); Assertions.assertNull(metadataColumn.getForeignKeyTableName()); Assertions.assertTrue(metadataColumn.isGenerated()); // field column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("field"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType()); Assertions.assertEquals(combinePath("array", "field"), metadataColumn.getFieldPath()); Assertions.assertEquals("field", metadataColumn.getSqlName()); Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex()); Assertions.assertFalse(metadataColumn.isPrimaryKey()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertNull(metadataColumn.getForeignKeyColumnName()); Assertions.assertNull(metadataColumn.getForeignKeyTableName()); Assertions.assertFalse(metadataColumn.isGenerated()); printMetadataOutput(metadata, getMethodName()); } @DisplayName("Tests an array of documents with array values and nulls in all levels.") @Test void testDocumentArrayWithNulls() { final Map<String, String> tableNameMap = new HashMap<>(); final BsonDocument document = BsonDocument.parse("{ \"_id\" : \"key\", \"array\" : [ {\"field\": [1, null]}, {\"field\": null}, null]}"); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(3, 
metadata.size()); // Virtual table for array_field final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get( toName(combinePath(COLLECTION_NAME, "array_field"), tableNameMap)); Assertions.assertEquals(4, metadataTable.getColumnMap().size()); // _id foreign key column DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals("_id", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex()); Assertions.assertTrue(metadataColumn.isPrimaryKey()); Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex()); Assertions.assertEquals(COLLECTION_NAME, metadataColumn.getForeignKeyTableName()); Assertions.assertEquals(COLLECTION_NAME + "__id", metadataColumn.getForeignKeyColumnName()); Assertions.assertFalse(metadataColumn.isGenerated()); // index key column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath("array", "index_lvl_0"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType()); Assertions.assertEquals("array", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath("array", "index_lvl_0"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(2, metadataColumn.getPrimaryKeyIndex()); Assertions.assertTrue(metadataColumn.isPrimaryKey()); Assertions.assertEquals(2, metadataColumn.getForeignKeyIndex()); Assertions.assertEquals( toName(combinePath("array", "index_lvl_0"), tableNameMap), metadataColumn.getForeignKeyColumnName()); Assertions.assertEquals( toName(combinePath(COLLECTION_NAME, "array"), 
tableNameMap), metadataColumn.getForeignKeyTableName()); Assertions.assertTrue(metadataColumn.isGenerated()); // index key column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath("array_field", "index_lvl_0"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.BIGINT, metadataColumn.getSqlType()); Assertions.assertEquals("array.field", metadataColumn.getFieldPath()); Assertions.assertEquals( toName(combinePath("array_field", "index_lvl_0"), tableNameMap), metadataColumn.getSqlName()); Assertions.assertEquals(3, metadataColumn.getPrimaryKeyIndex()); Assertions.assertTrue(metadataColumn.isPrimaryKey()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertNull(metadataColumn.getForeignKeyColumnName()); Assertions.assertNull(metadataColumn.getForeignKeyTableName()); Assertions.assertTrue(metadataColumn.isGenerated()); // value column metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("value"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.INTEGER, metadataColumn.getSqlType()); Assertions.assertEquals(combinePath("array", "field"), metadataColumn.getFieldPath()); Assertions.assertEquals("value", metadataColumn.getSqlName()); Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex()); Assertions.assertFalse(metadataColumn.isPrimaryKey()); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertNull(metadataColumn.getForeignKeyColumnName()); Assertions.assertNull(metadataColumn.getForeignKeyTableName()); Assertions.assertFalse(metadataColumn.isGenerated()); printMetadataOutput(metadata, getMethodName()); } }
4,551
0
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/test/java/software/amazon/documentdb/jdbc/metadata/DocumentDbTableSchemaGeneratorColumnTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.metadata; import com.google.common.collect.ImmutableSet; import org.bson.BsonArray; import org.bson.BsonBinary; import org.bson.BsonBoolean; import org.bson.BsonDateTime; import org.bson.BsonDecimal128; import org.bson.BsonDocument; import org.bson.BsonDouble; import org.bson.BsonInt32; import org.bson.BsonInt64; import org.bson.BsonMaxKey; import org.bson.BsonMinKey; import org.bson.BsonNull; import org.bson.BsonObjectId; import org.bson.BsonString; import org.bson.BsonType; import org.bson.BsonValue; import org.bson.types.Decimal128; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import software.amazon.documentdb.jdbc.common.utilities.JdbcType; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.stream.Collectors; import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.combinePath; import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.getPromotedSqlType; import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.toName; /** * These tests check whether columns are generated with the 
correct type given type conflicts, unsupported types, * and instances of null or missing. */ public class DocumentDbTableSchemaGeneratorColumnTest extends DocumentDbTableSchemaGeneratorTest { /** * Tests a collection where all the fields are consistent. */ @DisplayName("Tests a collection where all the fields are consistent.") @Test void testCreateScalarSingleDepth() { final List<BsonDocument> documentList = new ArrayList<>(); for (int count = 0; count < 3; count++) { // Types not supported in DocumentDB //BsonRegularExpression //BsonJavaScript //BsonJavaScriptWithScope //BsonDecimal128 final long dateTime = Instant.parse("2020-01-01T00:00:00.00Z").toEpochMilli(); final BsonDocument document = new BsonDocument() .append("_id", new BsonObjectId()) .append("fieldDecimal128", new BsonDecimal128(Decimal128.parse(String.valueOf(Double.MAX_VALUE)))) .append("fieldDouble", new BsonDouble(Double.MAX_VALUE)) .append("fieldString", new BsonString("新年快乐")) .append("fieldObjectId", new BsonObjectId()) .append("fieldBoolean", new BsonBoolean(true)) .append("fieldDate", new BsonDateTime(dateTime)) .append("fieldInt", new BsonInt32(Integer.MAX_VALUE)) .append("fieldLong", new BsonInt64(Long.MAX_VALUE)) .append("fieldMaxKey", new BsonMaxKey()) .append("fieldMinKey", new BsonMinKey()) .append("fieldNull", new BsonNull()) .append("fieldBinary", new BsonBinary(new byte[]{0, 1, 2})); Assertions.assertTrue(documentList.add(document)); } // Discover the collection metadata. 
final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator.generate( COLLECTION_NAME, documentList.iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(1, metadata.size()); Assertions.assertEquals(COLLECTION_NAME, metadata.get(COLLECTION_NAME).getSqlName()); final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata .get(COLLECTION_NAME); Assertions.assertEquals(13, metadataTable.getColumnMap().size()); final Set<JdbcType> integerSet = metadataTable.getColumnMap().values().stream().collect( Collectors.groupingBy(DocumentDbSchemaColumn::getSqlType)).keySet(); Assertions.assertEquals(9, integerSet.size()); final Set<JdbcType> expectedTypes = ImmutableSet.of( JdbcType.BIGINT, JdbcType.VARBINARY, JdbcType.BOOLEAN, JdbcType.DECIMAL, JdbcType.DOUBLE, JdbcType.INTEGER, JdbcType.NULL, JdbcType.TIMESTAMP, JdbcType.VARBINARY, JdbcType.VARCHAR ); Assertions.assertTrue(expectedTypes.containsAll(integerSet)); printMetadataOutput(metadata, getMethodName()); } /** * This tests SQL type promotion. 
*/ @DisplayName("This tests SQL type promotion.") @Test void testSqlTypesPromotion() { final long dateTime = Instant.parse("2020-01-01T00:00:00.00Z").toEpochMilli(); final BsonType[] supportedBsonTypeSet = new BsonType[]{ BsonType.BINARY, BsonType.BOOLEAN, BsonType.DATE_TIME, BsonType.DECIMAL128, BsonType.DOUBLE, BsonType.INT32, BsonType.INT64, BsonType.MAX_KEY, BsonType.MIN_KEY, BsonType.NULL, BsonType.OBJECT_ID, BsonType.STRING, BsonType.ARRAY, BsonType.DOCUMENT, }; final BsonValue[] supportedBsonValueSet = new BsonValue[]{ new BsonBinary(new byte[]{0, 1, 2}), new BsonBoolean(false), new BsonDateTime(dateTime), new BsonDecimal128(Decimal128.parse(String.valueOf(Double.MAX_VALUE))), new BsonDouble(Double.MAX_VALUE), new BsonInt32(Integer.MAX_VALUE), new BsonInt64(Long.MAX_VALUE), new BsonMaxKey(), new BsonMinKey(), new BsonNull(), new BsonObjectId(), new BsonString("新年快乐"), BsonArray.parse("[ 1, 2, 3 ]"), BsonDocument.parse("{ \"field\" : \"value\" }"), }; final List<BsonDocument> documentList = new ArrayList<>(); for (int outerIndex = 0; outerIndex < supportedBsonTypeSet.length; outerIndex++) { final BsonType bsonType = supportedBsonTypeSet[outerIndex]; final BsonValue bsonValue = supportedBsonValueSet[outerIndex]; final JdbcType initSqlType = getPromotedSqlType(bsonType, JdbcType.NULL); final BsonDocument initDocument = new BsonDocument() .append("_id", new BsonObjectId()) .append("field", bsonValue); for (int innerIndex = 0; innerIndex < supportedBsonTypeSet.length; innerIndex++) { documentList.clear(); final BsonValue nextBsonValue = supportedBsonValueSet[innerIndex]; final BsonType nextBsonType = supportedBsonTypeSet[innerIndex]; final JdbcType nextSqlType = getPromotedSqlType( nextBsonType, initSqlType); final BsonDocument nextDocument = new BsonDocument() .append("_id", new BsonObjectId()) .append("field", nextBsonValue); documentList.add(initDocument); documentList.add(nextDocument); // discover the collection metadata final Map<String, 
DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, documentList.iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(producesVirtualTable(bsonType, nextBsonType) ? 2 : 1, metadata.size(), String.format("%s:%s", bsonType, nextBsonType)); Assertions.assertEquals(COLLECTION_NAME, metadata.get( COLLECTION_NAME).getSqlName()); final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata .get(COLLECTION_NAME); Assertions.assertEquals(producesVirtualTable(bsonType, nextBsonType) ? 1 : 2, metadataTable.getColumnMap().size(), String.format("%s:%s", bsonType, nextBsonType)); final DocumentDbSchemaColumn metadataColumn = metadataTable.getColumnMap().get( "field"); if (!producesVirtualTable(bsonType, nextBsonType)) { Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(nextSqlType, metadataColumn.getSqlType()); } else { Assertions.assertNull(metadataColumn); } } } } /** * This tests unsupported scalar type promotion. */ @DisplayName("This tests unsupported scalar type promotion.") @Test void testUnsupportedCreateScalarPromotedSqlTypes() { final BsonType[] unsupportedBsonTypeSet = new BsonType[] { BsonType.DB_POINTER, BsonType.JAVASCRIPT, BsonType.JAVASCRIPT_WITH_SCOPE, BsonType.REGULAR_EXPRESSION, BsonType.SYMBOL, BsonType.UNDEFINED }; // Unsupported types promote to VARCHAR. for (final BsonType bsonType : unsupportedBsonTypeSet) { Assertions.assertEquals( JdbcType.VARCHAR, getPromotedSqlType(bsonType, JdbcType.NULL)); } } /** * Tests whether all columns are found, even if missing at first. 
*/ @DisplayName("Tests whether all columns are found, even if missing at first.") @Test void testCreateScalarFieldsMissing() { final List<BsonDocument> documentList = new ArrayList<>(); for (int count = 0; count < 13; count++) { final long dateTime = Instant.parse("2020-01-01T00:00:00.00Z").toEpochMilli(); final BsonDocument document = new BsonDocument() .append("_id", new BsonObjectId()); if (count == 1) { document.append("fieldDouble", new BsonDouble(Double.MAX_VALUE)); } if (count == 2) { document.append("fieldString", new BsonString("新年快乐")); } if (count == 3) { document.append("fieldObjectId", new BsonObjectId()); } if (count == 4) { document.append("fieldBoolean", new BsonBoolean(true)); } if (count == 5) { document.append("fieldDate", new BsonDateTime(dateTime)); } if (count == 6) { document.append("fieldInt", new BsonInt32(Integer.MAX_VALUE)); } if (count == 7) { document.append("fieldLong", new BsonInt64(Long.MAX_VALUE)); } if (count == 8) { document.append("fieldMaxKey", new BsonMaxKey()); } if (count == 9) { document.append("fieldMinKey", new BsonMinKey()); } if (count == 10) { document.append("fieldNull", new BsonNull()); } if (count == 11) { document.append("fieldBinary", new BsonBinary(new byte[]{0, 1, 2})); } if (count == 12) { document.append("fieldDecimal128", new BsonDecimal128(Decimal128.parse(String.valueOf(Double.MAX_VALUE)))); } Assertions.assertTrue(documentList.add(document)); } // Discover the collection metadata. 
final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, documentList.iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(1, metadata.size()); Assertions.assertEquals(COLLECTION_NAME, metadata.get(COLLECTION_NAME).getSqlName()); final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get(COLLECTION_NAME); Assertions.assertNotNull(metadataTable); Assertions.assertEquals(13, metadataTable.getColumnMap().size()); final Set<JdbcType> integerSet = metadataTable.getColumnMap().values().stream().collect( Collectors.groupingBy(DocumentDbSchemaColumn::getSqlType)).keySet(); Assertions.assertEquals(9, integerSet.size()); final Set<JdbcType> expectedTypes = ImmutableSet.of( JdbcType.BIGINT, JdbcType.VARBINARY, JdbcType.BOOLEAN, JdbcType.DECIMAL, JdbcType.DOUBLE, JdbcType.INTEGER, JdbcType.NULL, JdbcType.TIMESTAMP, JdbcType.VARBINARY, JdbcType.VARCHAR ); Assertions.assertTrue(expectedTypes.containsAll(integerSet)); final Map<String, String> tableNameMap = new HashMap<>(); // Check for in-order list of columns. 
int columnIndex = 0; for (Entry<String, DocumentDbSchemaColumn> entry : metadataTable.getColumnMap() .entrySet()) { Assertions.assertTrue(0 != columnIndex || toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap).equals(entry.getKey())); Assertions.assertTrue(1 != columnIndex || "fieldDouble".equals(entry.getKey())); Assertions.assertTrue(2 != columnIndex || "fieldString".equals(entry.getKey())); Assertions.assertTrue(3 != columnIndex || "fieldObjectId".equals(entry.getKey())); Assertions.assertTrue(4 != columnIndex || "fieldBoolean".equals(entry.getKey())); Assertions.assertTrue(5 != columnIndex || "fieldDate".equals(entry.getKey())); Assertions.assertTrue(6 != columnIndex || "fieldInt".equals(entry.getKey())); Assertions.assertTrue(7 != columnIndex || "fieldLong".equals(entry.getKey())); Assertions.assertTrue(8 != columnIndex || "fieldMaxKey".equals(entry.getKey())); Assertions.assertTrue(9 != columnIndex || "fieldMinKey".equals(entry.getKey())); Assertions.assertTrue(10 != columnIndex || "fieldNull".equals(entry.getKey())); Assertions.assertTrue(11 != columnIndex || "fieldBinary".equals(entry.getKey())); Assertions.assertTrue(12 != columnIndex || "fieldDecimal128".equals(entry.getKey())); columnIndex++; } printMetadataOutput(metadata, getMethodName()); } /** * Tests inconsistent data types in arrays should not fail. 
*/ @DisplayName("Tests inconsistent data types in arrays should not fail.") @Test void testInconsistentArrayDataType() { final Map<String, String> tableNameMap = new HashMap<>(); final BsonDocument[] tests = new BsonDocument[] { BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ 1, [ 2 ], null ] }"), BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ 1, { \"field\" : 2 } ] }"), BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ { \"field\" : 2 }, 3 ] }"), BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ { \"field\" : 2 }, [ 3, 4 ] ] }"), BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ [ 1, 2 ], { \"field\" : 2 }, null ] }"), BsonDocument.parse( "{ \"_id\" : \"key\", \"array\" : [ [ 1, 2 ], 2 ] }"), }; for (BsonDocument document : tests) { final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, Arrays.stream((new BsonDocument[]{document})).iterator()); Assertions.assertEquals(2, metadata.size()); final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata .get(toName(combinePath(COLLECTION_NAME, "array"), tableNameMap)); Assertions.assertNotNull(metadataTable); Assertions.assertEquals(3, metadataTable.getColumnMap().size()); DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable .getColumnMap().get(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(1, metadataColumn.getForeignKeyIndex()); Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex()); metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath("array", "index_lvl_0"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertEquals(2, metadataColumn.getPrimaryKeyIndex()); metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("value"); 
Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(0, metadataColumn.getForeignKeyIndex()); Assertions.assertEquals(0, metadataColumn.getPrimaryKeyIndex()); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); printMetadataOutput(metadata, getMethodName()); } } /** * Tests that the "_id" field of type DOCUMENT will be promoted to VARCHAR. */ @DisplayName("Tests that the \"_id\" field of type DOCUMENT will be promoted to VARCHAR.") @Test void testIdFieldIsDocument() { final Map<String, String> tableNameMap = new HashMap<>(); final BsonDocument document = BsonDocument .parse("{ \"_id\" : { \"field\" : 1 }, \"field2\" : 2 }"); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, Collections.singletonList(document).iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(1, metadata.size()); final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata .get(COLLECTION_NAME); Assertions.assertNotNull(metadataTable); Assertions.assertEquals(2, metadataTable.getColumnMap().size()); final DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); Assertions.assertEquals(1, metadataColumn.getPrimaryKeyIndex()); printMetadataOutput(metadata, getMethodName()); } /** * Test whether a conflict is detected in inconsistent arrays over multiple documents. * Here, array of object, then array of integer */ @DisplayName("Test whether a conflict is detected in inconsistent arrays over multiple documents. 
Here, array of object, then array of integer.") @Test void testMultiDocumentInconsistentArrayDocumentToInt32() { final Map<String, String> tableNameMap = new HashMap<>(); final List<BsonDocument> documents = new ArrayList<>(); BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \n" + " \"array\" : [ {\n" + " \"field1\" : 1, \n" + " \"field2\" : 2 \n" + " } ] \n" + "}" ); documents.add(document); document = BsonDocument.parse( "{ \"_id\" : \"key\", \n" + " \"array\" : [ 1, 2, 3 ] \n" + "}" ); documents.add(document); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, documents.iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(2, metadata.size()); final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get( toName(combinePath(COLLECTION_NAME, "array"), tableNameMap)); Assertions.assertNotNull(metadataTable); Assertions.assertEquals(3, metadataTable.getColumnMap().size()); final DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("value"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); printMetadataOutput(metadata, getMethodName()); } /** * Test whether a conflict is detected in inconsistent arrays over multiple documents. * Here, array of array of integer, then array of integer */ @DisplayName("Test whether a conflict is detected in inconsistent arrays over multiple documents. 
Here, array of array of integer, then array of integer") @Test void testMultiDocumentInconsistentArrayOfArrayToInt32() { final Map<String, String> tableNameMap = new HashMap<>(); final List<BsonDocument> documents = new ArrayList<>(); BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \n" + " \"array\" : [ [ 1, 2 ], [ 3, 4 ] ] \n" + "}" ); documents.add(document); document = BsonDocument.parse( "{ \"_id\" : \"key\", \n" + " \"array\" : [ 1, 2, 3 ] \n" + "}" ); documents.add(document); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, documents.iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(2, metadata.size()); final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get( toName(combinePath(COLLECTION_NAME, "array"), tableNameMap)); Assertions.assertNotNull(metadataTable); Assertions.assertEquals(3, metadataTable.getColumnMap().size()); final DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("value"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); printMetadataOutput(metadata, getMethodName()); } /** * Test whether empty sub-documents are handled. 
*/ @DisplayName("Test whether empty sub-documents are handled.") @Test void testEmptyDocuments() { final Map<String, String> tableNameMap = new HashMap<>(); final List<BsonDocument> documents = new ArrayList<>(); BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \n" + " \"doc\" : { } \n" + "}" ); documents.add(document); document = BsonDocument.parse( "{ \"_id\" : \"key2\", \n" + " \"doc\" : { } \n" + "}" ); documents.add(document); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, documents.iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(2, metadata.size()); final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get( toName(combinePath(COLLECTION_NAME, "doc"), tableNameMap)); Assertions.assertNotNull(metadataTable); Assertions.assertEquals(1, metadataTable.getColumnMap().size()); final DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap() .get(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); printMetadataOutput(metadata, getMethodName()); } /** * Test whether null scalars are handled. 
*/ @DisplayName("Test whether null scalars are handled.") @Test void testNullScalar() { final Map<String, String> tableNameMap = new HashMap<>(); final List<BsonDocument> documents = new ArrayList<>(); BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \n" + " \"field\" : null \n" + "}" ); documents.add(document); document = BsonDocument.parse( "{ \"_id\" : \"key2\", \n" + " \"field\" : null \n" + "}" ); documents.add(document); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, documents.iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(1, metadata.size()); final DocumentDbSchemaTable metadataTable = metadata.get(COLLECTION_NAME); Assertions.assertNotNull(metadataTable); Assertions.assertEquals(2, metadataTable.getColumnMap().size()); DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap() .get(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap() .get("field"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.NULL, metadataColumn.getSqlType()); printMetadataOutput(metadata, getMethodName()); } /** * Test whether empty array is handled. 
*/ @DisplayName("Test whether empty array is handled.") @Test void testEmptyArray() { final Map<String, String> tableNameMap = new HashMap<>(); final List<BsonDocument> documents = new ArrayList<>(); BsonDocument document = BsonDocument.parse( "{ \"_id\" : \"key\", \n" + " \"array\" : [ ] \n" + "}" ); documents.add(document); document = BsonDocument.parse( "{ \"_id\" : \"key2\", \n" + " \"array\" : [ ] \n" + "}" ); documents.add(document); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, documents.iterator()); Assertions.assertNotNull(metadata); Assertions.assertEquals(2, metadata.size()); final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata.get( toName(combinePath(COLLECTION_NAME, "array"), tableNameMap)); Assertions.assertNotNull(metadataTable); Assertions.assertEquals(3, metadataTable.getColumnMap().size()); DocumentDbMetadataColumn metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap() .get(toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.VARCHAR, metadataColumn.getSqlType()); metadataColumn = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get("value"); Assertions.assertNotNull(metadataColumn); Assertions.assertEquals(JdbcType.NULL, metadataColumn.getSqlType()); printMetadataOutput(metadata, getMethodName()); } /** * Test that primary and foreign key have consistent type after conflict. 
*/ @DisplayName("Test that primary and foreign key have consistent type after conflict.") @Test void testPrimaryKeyScalarTypeInconsistency() { final Map<String, String> tableNameMap = new HashMap<>(); final List<BsonDocument> documents = new ArrayList<>(); BsonDocument document = BsonDocument.parse( "{ \"_id\" : 1, \n" + " \"array\" : [1, 1, 1] \n" + "}" ); documents.add(document); document = BsonDocument.parse( "{ \"_id\" : 2.1, \n" + " \"array\" : [ 0.0, 0.0, 0.0] \n" + "}" ); documents.add(document); final Map<String, DocumentDbSchemaTable> metadata = DocumentDbTableSchemaGenerator .generate(COLLECTION_NAME, documents.iterator()); Assertions.assertNotNull(metadata); final DocumentDbMetadataTable metadataArrayTable = (DocumentDbMetadataTable) metadata .get(toName(combinePath(COLLECTION_NAME, "array"), tableNameMap)); Assertions.assertNotNull(metadataArrayTable); final DocumentDbMetadataColumn metadataColumnArrayId = (DocumentDbMetadataColumn) metadataArrayTable.getColumnMap().get( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumnArrayId); final DocumentDbMetadataTable metadataTable = (DocumentDbMetadataTable) metadata .get(COLLECTION_NAME); Assertions.assertNotNull(metadataTable); final DocumentDbMetadataColumn metadataColumnId = (DocumentDbMetadataColumn) metadataTable.getColumnMap().get( toName(combinePath(COLLECTION_NAME, "_id"), tableNameMap)); Assertions.assertNotNull(metadataColumnId); Assertions.assertEquals(metadataColumnId.getSqlType(), metadataColumnArrayId.getSqlType(), "Type of _id columns (DocumentDbTableSchemaGeneratorTest._id and " + "DocumentDbCollectionMetadataTest_array._id) should match"); Assertions.assertEquals(metadataColumnArrayId.getSqlType(), JdbcType.DOUBLE, "Type of ID columns (DocumentDbTableSchemaGeneratorTest._id and " + "DocumentDbCollectionMetadataTest_array._id) should be DOUBLE (" + JdbcType.DOUBLE + ")"); } }
4,552
0
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common/test/DocumentDbFlapDoodleTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.common.test; import com.mongodb.BasicDBObject; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; import com.mongodb.client.result.InsertOneResult; import org.bson.BsonBinary; import org.bson.BsonBoolean; import org.bson.BsonDateTime; import org.bson.BsonDecimal128; import org.bson.BsonDocument; import org.bson.BsonDouble; import org.bson.BsonInt32; import org.bson.BsonInt64; import org.bson.BsonMaxKey; import org.bson.BsonMinKey; import org.bson.BsonNull; import org.bson.BsonObjectId; import org.bson.BsonString; import org.bson.types.Decimal128; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.TestInstance; import java.time.Instant; import java.util.Collections; /** * Base class for DocumentDb FlapDoodle tests */ @TestInstance(TestInstance.Lifecycle.PER_CLASS) public class DocumentDbFlapDoodleTest extends DocumentDbTest { protected static final String ADMIN_DATABASE = "admin"; protected static final String ADMIN_USERNAME = "admin"; protected static final String ADMIN_PASSWORD = "admin"; @BeforeAll void init(final Integer mongoPort) { setMongoPort(mongoPort); } /** * Creates a user to the admin database with dbOwner role on another database. * @param databaseName the name of database to grant access for the user. 
* @param username the user name to create. * @param password the password for the user. */ protected static void createUser( final String databaseName, final String username, final String password) { createUser(ADMIN_DATABASE, databaseName, username, password); } /** * Creates a user in authentication database with dbOwner role on another database. * @param authenticationDatabaseName the name of the database to create the user in. Authentication must * be in the context of this database. * @param databaseName the name of database to grant access for the user. * @param username the user name to create. * @param password the password for the user. */ protected static void createUser( final String authenticationDatabaseName, final String databaseName, final String username, final String password) { try (MongoClient client = createMongoClient(ADMIN_DATABASE, ADMIN_USERNAME, ADMIN_PASSWORD)) { final MongoDatabase db = client.getDatabase(authenticationDatabaseName); final BasicDBObject createUserCommand = new BasicDBObject("createUser", username) .append("pwd", password) .append("roles", Collections.singletonList(new BasicDBObject("role", "dbOwner").append("db", databaseName))); db.runCommand(createUserCommand); } } /** * Prepares data for a given database and collection. * @param databaseName - the name of the database to insert data into. * @param collectionName - the name of the collection to insert data into. * @param recordCount - the number of records to insert data into. 
*/ protected static void prepareSimpleConsistentData( final String databaseName, final String collectionName, final int recordCount, final String username, final String password) { try (MongoClient client = createMongoClient(ADMIN_DATABASE, username, password)) { final MongoDatabase database = client.getDatabase(databaseName); final MongoCollection<BsonDocument> collection = database .getCollection(collectionName, BsonDocument.class); for (int count = 0; count < recordCount; count++) { // Types not supported in DocumentDB //BsonRegularExpression //BsonJavaScript //BsonJavaScriptWithScope //BsonDecimal128 final long dateTime = Instant.parse("2020-01-01T00:00:00.00Z").toEpochMilli(); final BsonDocument document = new BsonDocument() .append("_id", new BsonObjectId()) .append("fieldDouble", new BsonDouble(Double.MAX_VALUE)) .append("fieldString", new BsonString("新年快乐")) .append("fieldObjectId", new BsonObjectId()) .append("fieldBoolean", new BsonBoolean(true)) .append("fieldDate", new BsonDateTime(dateTime)) .append("fieldInt", new BsonInt32(Integer.MAX_VALUE)) .append("fieldLong", new BsonInt64(Long.MAX_VALUE)) .append("fieldMaxKey", new BsonMaxKey()) .append("fieldMinKey", new BsonMinKey()) .append("fieldNull", new BsonNull()) .append("fieldBinary", new BsonBinary(new byte[] { 0, 1, 2 })) .append("fieldDecimal128", new BsonDecimal128(Decimal128.parse(String.valueOf(Double.MAX_VALUE)))); final InsertOneResult result = collection.insertOne(document); Assertions.assertEquals(count + 1, collection.countDocuments()); Assertions.assertEquals(document.getObjectId("_id"), result.getInsertedId()); } } } protected static void insertBsonDocuments( final String collectionName, final String databaseName, final String user, final String password, final BsonDocument[] documents) { try (MongoClient client = createMongoClient(ADMIN_DATABASE, user, password)) { insertBsonDocuments(collectionName, databaseName, documents, client); } } protected static void insertBsonDocuments( final 
String collectionName, final String databaseName, final BsonDocument[] documents, final MongoClient client) { final MongoDatabase database = client.getDatabase(databaseName); final MongoCollection<BsonDocument> collection = database.getCollection(collectionName, BsonDocument.class); for (int count = 0; count < documents.length; count++) { collection.insertOne(documents[count]); Assertions.assertEquals(count + 1, collection.countDocuments()); } } }
4,553
0
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common/test/DocumentDbDocumentDbTestEnvironment.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.test;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoDatabase;
import org.bson.BsonDocument;
import org.bson.BsonType;

import java.sql.SQLException;
import java.util.UUID;

/**
 * Test environment that targets a DocumentDB cluster reachable on localhost
 * (for example, through an SSH tunnel). Credentials and the local port are
 * read from environment variables; each run works in a uniquely-named database.
 */
public class DocumentDbDocumentDbTestEnvironment extends DocumentDbAbstractTestEnvironment {
    private static final int DEFAULT_PORT = 27019;
    private static final String DOC_DB_HOST_LOCAL = "localhost";
    private static final String DOC_DB_USER_NAME_PROPERTY = "DOC_DB_USER_NAME";
    private static final String DOC_DB_PASSWORD_PROPERTY = "DOC_DB_PASSWORD";
    private static final String DOC_DB_LOCAL_PORT_PROPERTY = "DOC_DB_LOCAL_PORT";
    private static final String DOC_DB_CONNECTION_OPTIONS = "?tls=true&tlsAllowInvalidHostnames=true";
    private static final String DOC_DB_INTEGRATION_DATABASE = "integration";
    private static final String RESTRICTED_USERNAME = "docDbRestricted";

    // Name of the per-run database; null until startEnvironment() runs and
    // again after stopEnvironment() completes.
    private String databaseName;
    private final int port;

    DocumentDbDocumentDbTestEnvironment() {
        super(DOC_DB_HOST_LOCAL,
                System.getenv(DOC_DB_USER_NAME_PROPERTY),
                System.getenv(DOC_DB_PASSWORD_PROPERTY),
                RESTRICTED_USERNAME,
                DOC_DB_CONNECTION_OPTIONS);
        databaseName = null;
        port = getInteger(System.getenv(DOC_DB_LOCAL_PORT_PROPERTY), DEFAULT_PORT);
    }

    @Override
    protected boolean startEnvironment() {
        // The cluster itself is externally managed; "starting" only allocates
        // a unique database name for this run.
        databaseName = UUID.randomUUID().toString();
        return false;
    }

    @Override
    protected boolean stopEnvironment() {
        // Best-effort cleanup: drop the per-run database but leave the cluster up.
        try (MongoClient client = createMongoClient()) {
            client.getDatabase(getDatabaseName())
                    .runCommand(BsonDocument.parse("{ \"dropDatabase\": 1 }"));
        } catch (SQLException ex) {
            // A failure to connect just leaves the database behind.
        } finally {
            databaseName = null;
        }
        return false;
    }

    @Override
    protected int getPort() {
        return port;
    }

    @Override
    protected boolean isBsonTypeCompatible(final BsonType bsonType) {
        // These BSON types are not supported by DocumentDB.
        final boolean unsupported = bsonType == BsonType.DB_POINTER
                || bsonType == BsonType.END_OF_DOCUMENT
                || bsonType == BsonType.JAVASCRIPT
                || bsonType == BsonType.JAVASCRIPT_WITH_SCOPE
                || bsonType == BsonType.SYMBOL
                || bsonType == BsonType.UNDEFINED;
        return !unsupported;
    }

    @Override
    public String getDatabaseName() {
        return databaseName;
    }

    @Override
    public String toString() {
        return DocumentDbDocumentDbTestEnvironment.class.getSimpleName() + "{"
                + " databaseName='" + databaseName + '\''
                + ", username='" + getUsername() + '\''
                + ", port=" + port
                + ", enableAuthentication=" + true
                + " }";
    }

    // Parses the value as an int, falling back to the default when the value
    // is null or not a number.
    private static Integer getInteger(final String value, final Integer defaultValue) {
        if (value == null) {
            return defaultValue;
        }
        try {
            return Integer.parseInt(value);
        } catch (NumberFormatException e) {
            return defaultValue;
        }
    }
}
4,554
0
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common/test/DocumentDbAbstractTestEnvironment.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.test;

import com.mongodb.MongoException;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.result.InsertOneResult;
import org.bson.BsonBinary;
import org.bson.BsonBoolean;
import org.bson.BsonDateTime;
import org.bson.BsonDecimal128;
import org.bson.BsonDocument;
import org.bson.BsonDouble;
import org.bson.BsonInt32;
import org.bson.BsonInt64;
import org.bson.BsonMaxKey;
import org.bson.BsonMinKey;
import org.bson.BsonNull;
import org.bson.BsonObjectId;
import org.bson.BsonString;
import org.bson.BsonTimestamp;
import org.bson.BsonType;
import org.bson.types.Decimal128;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.junit.jupiter.api.Assertions;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperty;
import software.amazon.documentdb.jdbc.DocumentDbMetadataScanMethod;

import java.sql.SQLException;
import java.time.Instant;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.encodeValue;

/**
 * Provides some base implementation of the {@link DocumentDbTestEnvironment} interface.
 */
public abstract class DocumentDbAbstractTestEnvironment implements DocumentDbTestEnvironment {
    protected static final String ADMIN_DATABASE = "admin";
    private static final String JDBC_TEMPLATE = "jdbc:documentdb://%s%s:%s/%s%s";
    // MongoDB server error code for an authorization failure ('Unauthorized').
    private static final int AUTHORIZATION_FAILURE_CODE = 13;

    private final String host;
    private final String username;
    private final String password;
    private final String options;
    private final String restrictedUsername;
    // Collections registered via newCollectionName(true); dropped in stop().
    private final List<Entry<String, String>> temporaryCollections;
    private boolean isStarted = false;

    /**
     * Constructs a new {@link DocumentDbAbstractTestEnvironment} object.
     *
     * @param host the host the test environment connects to.
     * @param username the user name on the test host.
     * @param password the password for the user on the test host.
     * @param restrictedUsername the name of a user with restricted permissions.
     * @param options the options (if any) for mongo driver connection.
     */
    protected DocumentDbAbstractTestEnvironment(
            final String host,
            final String username,
            final String password,
            final String restrictedUsername,
            @Nullable final String options) {
        this.host = host;
        this.username = username;
        this.password = password;
        this.options = options;
        this.restrictedUsername = restrictedUsername;
        this.temporaryCollections = new ArrayList<>();
    }

    /**
     * Perform environment-specific initialization.
     *
     * @return true, if a new environment was started, false, if the environment was already started.
     */
    protected abstract boolean startEnvironment() throws Exception;

    /**
     * Performs environment-specific clean-up.
     *
     * @return true, if the environment was stopped, false, if the environment was already stopped.
     */
    protected abstract boolean stopEnvironment() throws Exception;

    /**
     * Gets the port number for the environment.
     *
     * @return the port number.
     */
    protected abstract int getPort();

    /**
     * Gets an indicator of whether the given type is compatible for this environment.
     *
     * @param bsonType the {@link BsonType} to check.
     * @return true, if the given type is compatible.
     */
    protected abstract boolean isBsonTypeCompatible(final BsonType bsonType);

    /**
     * Gets an indicator of whether the environment is started.
     *
     * @return true, if the environment is started, false, otherwise.
     */
    protected boolean isStarted() {
        return isStarted;
    }

    /**
     * Gets the user name.
     *
     * @return the user name.
     */
    protected String getUsername() {
        return username;
    }

    /**
     * Gets the restricted user name.
     *
     * @return the restricted user name.
     */
    protected String getRestrictedUsername() {
        return restrictedUsername;
    }

    /**
     * Gets the password.
     *
     * @return the password.
     */
    protected String getPassword() {
        return password;
    }

    @Override
    public String getJdbcConnectionString() {
        return String.format(JDBC_TEMPLATE,
                getCredentials(), getHost(), getPort(), getDatabaseName(), getOptions());
    }

    @Override
    public String getRestrictedUserConnectionString() {
        // Same template as getJdbcConnectionString(), but with restricted credentials.
        return String.format(JDBC_TEMPLATE,
                getCredentials(true), getHost(), getPort(), getDatabaseName(), getOptions());
    }

    @Override
    public abstract String getDatabaseName();

    @Override
    public boolean start() throws Exception {
        if (!isStarted) {
            final boolean started = startEnvironment();
            isStarted = true;
            return started;
        }
        return false;
    }

    @Override
    public boolean stop() throws Exception {
        if (isStarted) {
            // Drop any collections registered as temporary before shutting down.
            try (MongoClient client = createMongoClient()) {
                for (Entry<String, String> entry : temporaryCollections) {
                    final MongoDatabase database = client.getDatabase(entry.getKey());
                    final MongoCollection<BsonDocument> collection = database
                            .getCollection(entry.getValue(), BsonDocument.class);
                    try {
                        collection.drop();
                    } catch (MongoException e) {
                        // Ignore authorization failures; the restricted user may
                        // not be allowed to drop. Anything else is unexpected.
                        if (e.getCode() != AUTHORIZATION_FAILURE_CODE) {
                            throw e;
                        }
                    }
                }
                temporaryCollections.clear();
            }
        }
        final boolean stopped = stopEnvironment();
        isStarted = false;
        return stopped;
    }

    @Override
    public String newCollectionName(final boolean isTemporary) {
        // A UUID without hyphens is a valid, collision-free collection name.
        final String collectionName = UUID.randomUUID().toString().replace("-", "");
        if (isTemporary) {
            temporaryCollections.add(new SimpleEntry<>(getDatabaseName(), collectionName));
        }
        return collectionName;
    }

    @Override
    public MongoClient createMongoClient() throws SQLException {
        final DocumentDbConnectionProperties properties = DocumentDbConnectionProperties
                .getPropertiesFromConnectionString(getJdbcConnectionString());
        return properties.createMongoClient();
    }

    @Override
    public void prepareSimpleConsistentData(final MongoCollection<BsonDocument> collection,
            final int recordCount) {
        for (int count = 0; count < recordCount; count++) {
            // Types not supported in DocumentDB
            //BsonRegularExpression
            //BsonJavaScript
            //BsonJavaScriptWithScope
            //BsonDecimal128
            final Instant dateTime = Instant.parse("2020-01-01T00:00:00.00Z");
            final BsonDocument document = new BsonDocument()
                    .append("_id", new BsonObjectId())
                    .append("fieldDouble", new BsonDouble(Double.MAX_VALUE))
                    .append("fieldString", new BsonString("新年快乐"))
                    .append("fieldObjectId", new BsonObjectId())
                    .append("fieldBoolean", new BsonBoolean(true))
                    .append("fieldDate", new BsonDateTime(dateTime.toEpochMilli()))
                    .append("fieldInt", new BsonInt32(Integer.MAX_VALUE))
                    .append("fieldLong", new BsonInt64(Long.MAX_VALUE))
                    .append("fieldMaxKey", new BsonMaxKey())
                    .append("fieldMinKey", new BsonMinKey())
                    .append("fieldNull", new BsonNull())
                    .append("fieldBinary", new BsonBinary(new byte[]{0, 1, 2}))
                    .append("fieldDecimal128", new BsonDecimal128(Decimal128.parse(String.valueOf(Double.MAX_VALUE))))
                    .append("fieldTimestamp",
                            new BsonTimestamp((int) TimeUnit.MILLISECONDS.toSeconds(dateTime.toEpochMilli()), 1));
            final InsertOneResult result = collection.insertOne(document);
            Assertions.assertEquals(count + 1, collection.countDocuments());
            Assertions.assertEquals(document.getObjectId("_id"), result.getInsertedId());
        }
    }

    private String getHost() {
        return host;
    }

    // Returns the options normalized to start with '?', or "" when unset.
    private String getOptions() {
        return options != null
                ? options.startsWith("?") ? options : "?" + options
                : "";
    }

    private String getCredentials() {
        return getCredentials(false);
    }

    // Returns the URL-encoded 'user:password@' prefix, or "" when credentials are unset.
    private String getCredentials(final boolean isRestrictedUser) {
        return username != null && password != null
                ? String.format("%s:%s@",
                        encodeValue(isRestrictedUser ? restrictedUsername : username),
                        encodeValue(password))
                : "";
    }

    @Override
    public void insertBsonDocuments(final String collectionName, final BsonDocument[] documents)
            throws SQLException {
        try (MongoClient client = createMongoClient()) {
            final MongoDatabase database = client.getDatabase(getDatabaseName());
            final MongoCollection<BsonDocument> collection =
                    database.getCollection(collectionName, BsonDocument.class);
            for (int count = 0; count < documents.length; count++) {
                collection.insertOne(documents[count]);
                Assertions.assertEquals(count + 1, collection.countDocuments());
            }
        }
    }

    @Override
    public String getJdbcConnectionString(final DocumentDbMetadataScanMethod scanMethod) {
        // getOptions() never returns null: it is either "" or starts with '?'.
        // BUG FIX: the no-existing-options branch previously omitted the '='
        // between the property name and the scan method value.
        final String options = getOptions();
        final String scanMethodOption =
                DocumentDbConnectionProperty.METADATA_SCAN_METHOD + "=" + scanMethod.getName();
        final String optionsWithScanMethod = options.startsWith("?")
                ? options + "&" + scanMethodOption
                : "?" + scanMethodOption;
        return String.format(
                JDBC_TEMPLATE,
                getCredentials(),
                getHost(),
                getPort(),
                getDatabaseName(),
                optionsWithScanMethod);
    }
}
4,555
0
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common/test/DocumentDbTest.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc.common.test; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoClients; import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.encodeValue; public class DocumentDbTest { private static int mongoPort = -1; /** * Gets the port number of the mongod process is listening to. * @return if the process is running, returns the port the mongod process is listening to, -1 otherwise. */ protected static int getMongoPort() { return mongoPort; } /** * Sets the current listening port. * * @param port the current Mongo server port. */ protected static void setMongoPort(final int port) { mongoPort = port; } /** * Creates a new MongoClient instance using the current port. * * @return a new instance of MongoClient. */ protected static MongoClient createMongoClient() { return createMongoClient(null, null, null); } /** * Creates a new MongoClient instance using the current port. * * @param database the authenticating database to authenticate * @param username the username to authenticate * @param password the password to authenticate * @return a new instance of MongoClient. */ protected static MongoClient createMongoClient( final String database, final String username, final String password) { return createMongoClient(database, username, password, null); } /** * Creates a new MongoClient instance using the current port. 
* * @param database the authenticating database to authenticate * @param username the username to authenticate * @param password the password to authenticate * @param options any additional options to pass. Format '[?]option=value[&option=value[...]]' * @return a new instance of MongoClient. */ protected static MongoClient createMongoClient( final String database, final String username, final String password, final String options) { final int port = getMongoPort(); final String credentials = username != null && password != null ? String.format("%s:%s@", encodeValue(username), encodeValue(password)) : ""; final String hostname = "localhost"; final String authDatabase = database != null ? "/" + database : "/"; final String optionsValue = options != null ? options.startsWith("?") ? options : "?" + options : ""; return MongoClients.create(String.format("mongodb://%s%s:%s%s%s", credentials, hostname, port, authDatabase, optionsValue)); } }
4,556
0
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common/test/DocumentDbFlapDoodleExtensionBase.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.test;

import de.flapdoodle.embed.mongo.MongoShellStarter;
import de.flapdoodle.embed.mongo.MongodExecutable;
import de.flapdoodle.embed.mongo.MongodProcess;
import de.flapdoodle.embed.mongo.MongodStarter;
import de.flapdoodle.embed.mongo.config.Defaults;
import de.flapdoodle.embed.mongo.config.ImmutableMongoShellConfig;
import de.flapdoodle.embed.mongo.config.ImmutableMongodConfig.Builder;
import de.flapdoodle.embed.mongo.config.MongoCmdOptions;
import de.flapdoodle.embed.mongo.config.MongoShellConfig;
import de.flapdoodle.embed.mongo.config.MongodConfig;
import de.flapdoodle.embed.mongo.config.Net;
import de.flapdoodle.embed.mongo.distribution.Version.Main;
import de.flapdoodle.embed.mongo.packageresolver.Command;
import de.flapdoodle.embed.process.config.RuntimeConfig;
import de.flapdoodle.embed.process.config.process.ProcessOutput;
import de.flapdoodle.embed.process.io.LogWatchStreamProcessor;
import de.flapdoodle.embed.process.io.NamedOutputStreamProcessor;
import de.flapdoodle.embed.process.io.Processors;
import de.flapdoodle.embed.process.io.StreamProcessor;
import de.flapdoodle.embed.process.runtime.Network;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ParameterContext;
import org.junit.jupiter.api.extension.support.TypeBasedParameterResolver;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import static de.flapdoodle.embed.process.io.Processors.namedConsole;
import static org.apache.logging.log4j.core.util.Assert.isEmpty;

/**
 * Base Jupiter extension for creating and configuring a FlapDoodle MongoDb instance.
 */
abstract class DocumentDbFlapDoodleExtensionBase extends TypeBasedParameterResolver<Integer>
        implements AfterAllCallback, BeforeAllCallback {
    protected static final int DEFAULT_PORT = 27017;
    private static final long INIT_TIMEOUT_MS = 30000;
    private static final String USER_ADDED_TOKEN = "Successfully added user";
    private static final String ADMIN_DATABASE = "admin";
    private static final String ADMIN_USERNAME = "admin";
    private static final String ADMIN_PASSWORD = "admin";
    // Shared state for the single embedded mongod instance managed by this extension.
    private static MongodConfig mongoConfig;
    private static MongodExecutable mongoExecutable = null;
    private static MongodProcess mongoProcess = null;

    interface ServerHolder {
        /**
         * Retrieve the port to connect to the Mongo server.
         * @return the port number.
         */
        Integer getPort();

        /**
         * Start the Mongo server.
         * @throws Exception if there is an issue starting the server.
         */
        void start() throws Exception;

        /**
         * Stop the Mongo server.
         * @throws Exception if there is an issue stopping the server.
         */
        void stop() throws Exception;
    }

    @Override
    public void afterAll(final ExtensionContext extensionContext) throws Exception {
        final ServerHolder h = getHolder(extensionContext, false);
        if (h != null) {
            h.stop();
        }
    }

    @Override
    public void beforeAll(final ExtensionContext extensionContext) throws Exception {
        // If an ancestor context already started a server, reuse it.
        final ServerHolder holder = getHolder(extensionContext, true);
        if (holder != null) {
            return;
        }
        final ServerHolder newHolder = createHolder();
        newHolder.start();
        getStore(extensionContext).put(getIdentifier(), newHolder);
    }

    @Override
    public Integer resolveParameter(final ParameterContext parameterContext,
            final ExtensionContext extensionContext) {
        // beforeAll guarantees a holder is registered before parameters resolve.
        return getHolder(extensionContext, true).getPort();
    }

    // Walks the context hierarchy looking for a registered holder; when
    // recursive is false, only the given context is examined.
    private ServerHolder getHolder(final ExtensionContext context, final boolean recursive) {
        for (ExtensionContext currentContext = context; currentContext != null;
                currentContext = currentContext.getParent().orElse(null)) {
            final ServerHolder holder = (ServerHolder) getStore(currentContext).get(getIdentifier());
            if (holder != null) {
                return holder;
            }
            if (!recursive) {
                break;
            }
        }
        return null;
    }

    private ExtensionContext.Store getStore(final ExtensionContext context) {
        return context.getStore(ExtensionContext.Namespace.create(getClass(), context));
    }

    /**
     * Get the identifier for the Holder object for the Mongo server.
     * @return the identifier for the Mongo server.
     */
    protected abstract String getIdentifier();

    /**
     * Create a new Holder object for the Mongo server.
     * @return the new holder object.
     */
    protected abstract ServerHolder createHolder();

    /**
     * Starts the mongod using default parameters.
     * @return returns true if the mongod is started, or false if already started.
     * @throws IOException if unable to start the mongod.
     */
    protected static boolean startMongoDbInstance() throws IOException {
        return startMongoDbInstance(Network.freeServerPort(Network.getLocalHost()));
    }

    /**
     * Starts the mongod using custom port number.
     * @param port the port number that mongod listens on.
     * @return returns true if the mongod is started, or false if already started.
     * @throws IOException if unable to start the mongod.
     */
    protected static boolean startMongoDbInstance(final int port) throws IOException {
        return startMongoDbInstance(port, false);
    }

    /**
     * Starts the mongod using custom command options.
     * @param enableAuthentication indicates whether to start the process with authentication enabled.
     * @return returns true if the mongod is started, or false if already started.
     * @throws IOException if unable to start the mongod.
     */
    protected static boolean startMongoDbInstance(final boolean enableAuthentication)
            throws IOException {
        return startMongoDbInstance(Network.freeServerPort(Network.getLocalHost()),
                enableAuthentication);
    }

    /**
     * Starts the mongod using custom port and command options.
     * @param port the port number that mongod listens on.
     * @param enableAuthentication indicates whether to start the process with authentication enabled.
     * @return returns true if the mongod is started, or false if already started.
     * @throws IOException if unable to start the mongod.
     */
    protected static synchronized boolean startMongoDbInstance(
            final int port,
            final boolean enableAuthentication) throws IOException {
        if (mongoExecutable != null) {
            return false;
        }
        final MongoCmdOptions cmdOptions = MongoCmdOptions.builder()
                .auth(enableAuthentication)
                .build();
        final MongodStarter starter = MongodStarter.getDefaultInstance();
        // cmdOptions was just built and cannot be null; apply it unconditionally.
        mongoConfig = MongodConfig.builder()
                .version(Main.V4_4)
                .net(new Net(port, Network.localhostIsIPv6()))
                .cmdOptions(cmdOptions)
                .build();
        mongoExecutable = starter.prepare(mongoConfig);
        mongoProcess = mongoExecutable.start();
        addAdmin();
        return true;
    }

    /**
     * Stops the running mongod process.
     * @return returns true if the mongod is stopped, or false if already stopped.
     */
    protected static synchronized boolean stopMongoDbInstance() {
        if (mongoExecutable == null) {
            return false;
        }
        mongoProcess.stop();
        mongoExecutable.stop();
        mongoExecutable = null;
        mongoProcess = null;
        return true;
    }

    /**
     * Gets whether the mongod process is running.
     * @return returns true if process is running, false otherwise.
     */
    protected static synchronized boolean isMongoDbProcessRunning() {
        return mongoProcess != null && mongoProcess.isProcessRunning();
    }

    /**
     * Gets the port number of the mongod process is listening to.
     * @return if the process is running, returns the port the mongod process is listening to,
     * {@link #DEFAULT_PORT} otherwise.
     */
    protected static synchronized int getMongoPort() {
        return mongoProcess != null ? mongoProcess.getConfig().net().getPort() : DEFAULT_PORT;
    }

    /**
     * Creates the 'admin' super user in the admin database by running a
     * mongo shell script against the freshly started instance.
     * @throws IOException if the shell cannot be started or the script fails.
     */
    protected static void addAdmin() throws IOException {
        final String scriptText = StringUtils.join(
                String.format("db.createUser("
                        + "{\"user\":\"%s\",\"pwd\":\"%s\","
                        + "\"roles\":["
                        + "\"root\","
                        + "{\"role\":\"userAdmin\",\"db\":\"admin\"},"
                        + "{\"role\":\"dbAdmin\",\"db\":\"admin\"},"
                        + "{\"role\":\"userAdminAnyDatabase\",\"db\":\"admin\"},"
                        + "{\"role\":\"dbAdminAnyDatabase\",\"db\":\"admin\"},"
                        + "{\"role\":\"clusterAdmin\",\"db\":\"admin\"},"
                        + "{\"role\":\"dbOwner\",\"db\":\"admin\"},"
                        + "]});%n",
                        ADMIN_USERNAME, ADMIN_PASSWORD));
        runScriptAndWait(scriptText, USER_ADDED_TOKEN,
                new String[]{"couldn't add user", "failed to load", "login failed"},
                ADMIN_DATABASE, null, null);
    }

    // Runs the given script text in a mongo shell and, when a success token is
    // supplied, blocks until that token (or a failure string) appears in the
    // shell output, up to INIT_TIMEOUT_MS.
    private static void runScriptAndWait(
            final String scriptText,
            final String token,
            final String[] failures,
            final String dbName,
            final String username,
            final String password) throws IOException {
        final StreamProcessor mongoOutput;
        if (!isEmpty(token)) {
            mongoOutput = new MongoLogWatchStreamProcessor(
                    token,
                    (failures != null)
                            ? new HashSet<>(Arrays.asList(failures))
                            : Collections.<String>emptySet(),
                    namedConsole("[mongo shell output]"));
        } else {
            mongoOutput = new NamedOutputStreamProcessor("[mongo shell output]",
                    Processors.console());
        }
        final RuntimeConfig runtimeConfig = Defaults.runtimeConfigFor(Command.Mongo)
                .processOutput(ProcessOutput.builder()
                        .output(mongoOutput)
                        .error(Processors.namedConsole("[mongo shell error]"))
                        .commands(Processors.console())
                        .build())
                .build();
        final MongoShellStarter starter = MongoShellStarter.getInstance(runtimeConfig);
        final File scriptFile = writeTmpScriptFile(scriptText);
        final ImmutableMongoShellConfig.Builder builder = MongoShellConfig.builder();
        if (!isEmpty(dbName)) {
            builder.dbName(dbName);
        }
        if (!isEmpty(username)) {
            builder.userName(username);
        }
        if (!isEmpty(password)) {
            builder.password(password);
        }
        starter.prepare(builder
                .scriptName(scriptFile.getAbsolutePath())
                .version(mongoConfig.version())
                .net(mongoConfig.net())
                .build()).start();
        if (mongoOutput instanceof MongoLogWatchStreamProcessor) {
            ((MongoLogWatchStreamProcessor) mongoOutput).waitForResult(INIT_TIMEOUT_MS);
        }
    }

    // Writes the script to a self-deleting temp file, UTF-8 encoded.
    private static File writeTmpScriptFile(final String scriptText) throws IOException {
        final File scriptFile = File.createTempFile("tempfile", ".js");
        scriptFile.deleteOnExit();
        // try-with-resources guarantees the stream is closed even if write() throws.
        try (BufferedWriter bw = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(scriptFile), StandardCharsets.UTF_8))) {
            bw.write(scriptText);
        }
        return scriptFile;
    }

    /**
     * Watches the mongo or mongod output stream.
     */
    private static class MongoLogWatchStreamProcessor extends LogWatchStreamProcessor {
        private final Object mutex = new Object();
        private final String success;
        private final Set<String> failures;
        private volatile boolean found = false;

        /**
         * Creates a new MongoLogWatchStreamProcessor
         * @param success the string token to watch for to indicate success.
         * @param failures the set of strings to watch for to indicate failure.
         * @param destination the stream processor.
         */
        public MongoLogWatchStreamProcessor(
                final String success,
                final Set<String> failures,
                final StreamProcessor destination) {
            super(success, failures, destination);
            this.success = success;
            this.failures = failures;
        }

        @Override
        public void process(final String block) {
            if (containsSuccess(block) || containsFailure(block)) {
                synchronized (mutex) {
                    found = true;
                    mutex.notifyAll();
                }
            } else {
                super.process(block);
            }
        }

        private boolean containsSuccess(final String block) {
            return block.contains(success);
        }

        private boolean containsFailure(final String block) {
            for (String failure : failures) {
                if (block.contains(failure)) {
                    return true;
                }
            }
            return false;
        }

        /**
         * Waits for a result up to as long as the given timeout.
         * @param timeout the maximum time, in milliseconds, to wait for a result.
         */
        public void waitForResult(final long timeout) {
            // BUG FIX: the previous 'while (!found) mutex.wait(timeout)' loop
            // re-entered wait() after each timeout, waiting forever when the
            // token never arrived. Track a deadline so the wait actually
            // expires, and handle spurious wakeups by re-checking 'found'.
            final long deadline = System.currentTimeMillis() + timeout;
            synchronized (mutex) {
                try {
                    long remaining = timeout;
                    while (!found && remaining > 0) {
                        mutex.wait(remaining);
                        remaining = deadline - System.currentTimeMillis();
                    }
                } catch (InterruptedException e) {
                    // Restore the interrupt status instead of swallowing it.
                    Thread.currentThread().interrupt();
                }
            }
        }
    }
}
4,557
0
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common/test/DocumentDbDocumentDbTest.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.test;

/**
 * Base class for testing against a DocumentDB server.
 */
public class DocumentDbDocumentDbTest extends DocumentDbTest {
    private static final int DEFAULT_PORT = 27019;
    private static final String DOC_DB_USER_NAME_PROPERTY = "DOC_DB_USER_NAME";
    private static final String DOC_DB_PASSWORD_PROPERTY = "DOC_DB_PASSWORD";
    private static final String DOC_DB_LOCAL_PORT_PROPERTY = "DOC_DB_LOCAL_PORT";
    // Port in effect before setupRemoteTesting(), restored afterwards.
    private static Integer originalMongoPort = -1;

    /**
     * Sets up the environment for remote testing.
     * Call {@link DocumentDbDocumentDbTest#restoreOriginalTesting()} when finished testing.
     */
    public static void setupRemoteTesting() {
        originalMongoPort = getMongoPort();
        final String portSetting = System.getenv(DOC_DB_LOCAL_PORT_PROPERTY);
        setMongoPort(getInteger(portSetting, DEFAULT_PORT));
    }

    /**
     * Restores the test environment to its previous state.
     */
    public static void restoreOriginalTesting() {
        setMongoPort(originalMongoPort);
    }

    /** Gets the DocumentDB user name from the environment. */
    public static String getDocDbUserName() {
        return System.getenv(DOC_DB_USER_NAME_PROPERTY);
    }

    /** Gets the DocumentDB password from the environment. */
    public static String getDocDbPassword() {
        return System.getenv(DOC_DB_PASSWORD_PROPERTY);
    }

    // Parses the value as an int, falling back to the default when the value
    // is null or not a number.
    private static Integer getInteger(final String value, final Integer defaultValue) {
        if (value == null) {
            return defaultValue;
        }
        try {
            return Integer.parseInt(value);
        } catch (NumberFormatException e) {
            return defaultValue;
        }
    }
}
4,558
0
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common/test/DocumentDbFlapDoodleExtension.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.common.test;

import java.io.IOException;

/**
 * Jupiter extension for creating and configuring a FlapDoodle MongoDb instance with no replica set.
 */
public class DocumentDbFlapDoodleExtension extends DocumentDbFlapDoodleExtensionBase {
    private static final String BASIC_FLAPDOODLE_INFO = "basic-flapdoodle-info";

    @Override
    protected String getIdentifier() {
        return BASIC_FLAPDOODLE_INFO;
    }

    @Override
    protected ServerHolder createHolder() {
        return new FlapDoodleHolder();
    }

    /**
     * Holder that delegates to the base class's embedded mongod lifecycle,
     * starting the instance with authentication enabled.
     */
    private static class FlapDoodleHolder implements ServerHolder {
        @Override
        public Integer getPort() {
            return getMongoPort();
        }

        @Override
        public void start() throws IOException {
            startMongoDbInstance(true);
        }

        @Override
        public void stop() {
            stopMongoDbInstance();
        }
    }
}
4,559
0
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common/test/DocumentDbTestEnvironment.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.common.test;

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import org.bson.BsonDocument;
import software.amazon.documentdb.jdbc.DocumentDbMetadataScanMethod;
import java.sql.SQLException;

/**
 * Abstraction over a DocumentDB-compatible test server (e.g., an embedded MongoDB
 * instance or a remote DocumentDB cluster reached through an SSH tunnel).
 */
public interface DocumentDbTestEnvironment {
    /**
     * Starts the test environment and preforms any initialization.
     *
     * @return true, if the environment starts a new instance, otherwise false, if the
     * environment was already started.
     * @throws Exception if the environment fails to start.
     */
    boolean start() throws Exception;

    /**
     * Stops the test environment and cleans up any temporary collections.
     *
     * @return true, if the environment stops an instance, otherwise false, if the
     * environment was already stopped or could not be stopped.
     * @throws Exception if the environment fails to stop cleanly.
     */
    boolean stop() throws Exception;

    /**
     * Creates a new collection name for the database.
     *
     * @param isTemporary an indicator of whether the collection is temporary. If true, then the
     * collection will be removed when the environment is stopped.
     * @return a new collection name.
     */
    String newCollectionName(final boolean isTemporary);

    /**
     * Gets the database name.
     *
     * @return the database name.
     */
    String getDatabaseName();

    /**
     * Gets the JDBC connection string for this environment.
     *
     * @return the JDBC connection string.
     */
    String getJdbcConnectionString();

    /**
     * Gets the JDBC connection string for the restricted user.
     *
     * @return the JDBC connection string.
     */
    String getRestrictedUserConnectionString();

    /**
     * Creates a new {@link MongoClient} object with default username and password.
     *
     * @return a new {@link MongoClient} object.
     * @throws SQLException if the client cannot be created.
     */
    MongoClient createMongoClient() throws SQLException;

    /**
     * Prepares simple consistent data records for the given collections.
     *
     * @param collection the collection to populate.
     * @param recordCount the number of records to add.
     */
    void prepareSimpleConsistentData(
            final MongoCollection<BsonDocument> collection,
            final int recordCount);

    /**
     * Inserts array of documents into target collection.
     *
     * @param collectionName the collection to populate.
     * @param documents the documents to add.
     * @throws SQLException if the documents cannot be inserted.
     */
    void insertBsonDocuments(
            final String collectionName,
            final BsonDocument[] documents) throws SQLException;

    /**
     * Gets the JDBC connection string for this environment.
     *
     * @param scanMethod scan method to add as connection parameter to default string.
     * @return the JDBC connection string.
     */
    String getJdbcConnectionString(final DocumentDbMetadataScanMethod scanMethod);
}
4,560
0
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common/test/DocumentDbMongoTestEnvironment.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.common.test;

import de.flapdoodle.embed.mongo.MongoShellStarter;
import de.flapdoodle.embed.mongo.MongodExecutable;
import de.flapdoodle.embed.mongo.MongodProcess;
import de.flapdoodle.embed.mongo.MongodStarter;
import de.flapdoodle.embed.mongo.config.Defaults;
import de.flapdoodle.embed.mongo.config.ImmutableMongoShellConfig;
import de.flapdoodle.embed.mongo.config.ImmutableMongodConfig.Builder;
import de.flapdoodle.embed.mongo.config.MongoCmdOptions;
import de.flapdoodle.embed.mongo.config.MongoShellConfig;
import de.flapdoodle.embed.mongo.config.MongodConfig;
import de.flapdoodle.embed.mongo.config.Net;
import de.flapdoodle.embed.mongo.distribution.Version.Main;
import de.flapdoodle.embed.mongo.packageresolver.Command;
import de.flapdoodle.embed.process.config.RuntimeConfig;
import de.flapdoodle.embed.process.config.process.ProcessOutput;
import de.flapdoodle.embed.process.io.LogWatchStreamProcessor;
import de.flapdoodle.embed.process.io.NamedOutputStreamProcessor;
import de.flapdoodle.embed.process.io.Processors;
import de.flapdoodle.embed.process.io.StreamProcessor;
import de.flapdoodle.embed.process.runtime.Network;
import org.apache.commons.lang3.StringUtils;
import org.bson.BsonType;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;

import static de.flapdoodle.embed.process.io.Processors.namedConsole;
import static org.apache.logging.log4j.core.util.Assert.isEmpty;

/**
 * Test environment backed by an embedded (FlapDoodle) MongoDB 4.0 instance.
 *
 * <p>On start, the environment launches a {@code mongod} process on a free local
 * port, creates an admin user, a database-owner user and a read-only
 * (restricted) user by driving the {@code mongo} shell with generated scripts.
 */
public class DocumentDbMongoTestEnvironment extends DocumentDbAbstractTestEnvironment {
    private static final long INIT_TIMEOUT_MS = 30000;
    // Token printed by the mongo shell scripts on success; watched for in output.
    private static final String USER_ADDED_TOKEN = "Successfully added user";
    private static final String INTEGRATION_DATABASE_NAME = "integration";
    private static final String MONGO_LOCAL_HOST = "localhost";
    private static final String MONGO_USERNAME = "mongo";
    private static final boolean USE_AUTHENTICATION_DEFAULT = true;
    private static final String ADMIN_USERNAME = "admin";
    private static final String ADMIN_PASSWORD = "admin";
    private static final String RESTRICTED_USERNAME = "restrictedUser";

    private final boolean enableAuthentication;
    private final String databaseName;
    private MongodConfig mongoConfig = null;
    private MongodExecutable mongoExecutable = null;
    private MongodProcess mongoProcess = null;
    private int port = -1;

    /**
     * Creates a new {@link DocumentDbMongoTestEnvironment} with defaults.
     */
    DocumentDbMongoTestEnvironment() {
        this(INTEGRATION_DATABASE_NAME, MONGO_USERNAME, getRandomPassword(),
                USE_AUTHENTICATION_DEFAULT);
    }

    /**
     * Creates a new {@link DocumentDbMongoTestEnvironment}.
     *
     * @param databaseName the name of the database the user should have access to.
     * @param username the user name.
     * @param password the password for the user.
     * @param enableAuthentication indicator of whether to enable authentication.
     */
    DocumentDbMongoTestEnvironment(
            final String databaseName,
            final String username,
            final String password,
            final boolean enableAuthentication) {
        super(MONGO_LOCAL_HOST, username, password, RESTRICTED_USERNAME, "tls=false");
        this.enableAuthentication = enableAuthentication;
        this.databaseName = databaseName;
    }

    /**
     * Starts the embedded mongod process and provisions the test users.
     *
     * @return true if a new instance was started; false if already running.
     * @throws Exception if the process cannot be started or users cannot be created.
     */
    @Override
    protected boolean startEnvironment() throws Exception {
        if (isStarted() || mongoProcess != null) {
            return false;
        }
        port = Network.freeServerPort(Network.getLocalHost());
        final MongoCmdOptions cmdOptions = MongoCmdOptions.builder()
                .auth(enableAuthentication)
                .build();
        final MongodStarter starter = MongodStarter.getDefaultInstance();
        final Builder builder = MongodConfig.builder()
                .version(Main.V4_0)
                .net(new Net(port, Network.localhostIsIPv6()))
                .cmdOptions(cmdOptions);
        mongoConfig = builder.build();
        mongoExecutable = starter.prepare(mongoConfig);
        mongoProcess = mongoExecutable.start();
        addAdmin();
        createUser(databaseName, "dbOwner", getUsername(), getPassword());
        createUser(databaseName, "read", getRestrictedUsername(), getPassword());
        return true;
    }

    /**
     * Stops the embedded mongod process, if running.
     *
     * @return true if an instance was stopped; false if not running.
     */
    @Override
    protected boolean stopEnvironment() {
        if (!isStarted() || mongoExecutable == null) {
            return false;
        }
        mongoProcess.stop();
        mongoExecutable.stop();
        mongoExecutable = null;
        mongoProcess = null;
        port = -1;
        return true;
    }

    @Override
    protected int getPort() {
        return port;
    }

    @Override
    protected boolean isBsonTypeCompatible(final BsonType bsonType) {
        // All BsonType are compatible with Mongo server.
        return true;
    }

    @Override
    public String getDatabaseName() {
        return databaseName;
    }

    @Override
    public String toString() {
        return DocumentDbMongoTestEnvironment.class.getSimpleName() + "{"
                + " databaseName='" + databaseName + '\''
                + ", username='" + getUsername() + '\''
                + ", port=" + port
                + ", enableAuthentication=" + enableAuthentication
                + " }";
    }

    /**
     * Creates a user to the admin database with dbOwner role on another database.
     *
     * @param databaseName the name of database to grant access for the user.
     * @param role the role to grant the user on the database (e.g. "dbOwner", "read").
     * @param username the user name to create.
     * @param password the password for the user.
     * @throws IOException if unable to start the mongo shell process.
     */
    private void createUser(
            final String databaseName,
            final String role,
            final String username,
            final String password) throws IOException {
        final String[] roles = new String[] {
                "{\"db\":\"" + databaseName + "\",\"role\":\"" + role + "\"}" };
        final String scriptText = StringUtils.join(String.format(
                "db = db.getSiblingDB('%s'); "
                        + "db.createUser({\"user\":\"%s\",\"pwd\":\"%s\",\"roles\":[%s]});%n"
                        + "db.getUser('%s');",
                ADMIN_DATABASE, username, password, StringUtils.join(roles, ","), username), "");
        runScriptAndWait(
                scriptText,
                new String[]{"already exists", "failed to load", "login failed"},
                ADMIN_USERNAME, ADMIN_PASSWORD);
    }

    /**
     * Creates the administrative user with full privileges on the admin database.
     *
     * @throws IOException if unable to start the mongo shell process.
     */
    private void addAdmin() throws IOException {
        final String scriptText = StringUtils.join(
                String.format("db.createUser("
                        + "{\"user\":\"%s\",\"pwd\":\"%s\","
                        + "\"roles\":["
                        + "\"root\","
                        + "{\"role\":\"userAdmin\",\"db\":\"admin\"},"
                        + "{\"role\":\"dbAdmin\",\"db\":\"admin\"},"
                        + "{\"role\":\"userAdminAnyDatabase\",\"db\":\"admin\"},"
                        + "{\"role\":\"dbAdminAnyDatabase\",\"db\":\"admin\"},"
                        + "{\"role\":\"clusterAdmin\",\"db\":\"admin\"},"
                        + "{\"role\":\"dbOwner\",\"db\":\"admin\"},"
                        + "]});%n",
                        ADMIN_USERNAME, ADMIN_PASSWORD));
        runScriptAndWait(scriptText,
                new String[]{"couldn't add user", "failed to load", "login failed"},
                null, null);
    }

    /**
     * Writes the script to a temp file, runs it via the mongo shell, and blocks
     * (up to {@link #INIT_TIMEOUT_MS}) until a success or failure token appears
     * in the shell output.
     *
     * @param scriptText the JavaScript to execute in the shell.
     * @param failures tokens in shell output that indicate failure (may be null).
     * @param username the user to authenticate as, or null for no authentication.
     * @param password the password, or null for no authentication.
     * @throws IOException if unable to write the script or start the shell process.
     */
    private void runScriptAndWait(
            final String scriptText,
            final String[] failures,
            final String username,
            final String password) throws IOException {
        // USER_ADDED_TOKEN is a non-empty constant, so the shell output is always
        // watched for the success/failure tokens. (The previous plain-console
        // fallback branch was unreachable and has been removed.)
        final MongoLogWatchStreamProcessor mongoOutput = new MongoLogWatchStreamProcessor(
                USER_ADDED_TOKEN,
                (failures != null)
                        ? new HashSet<>(Arrays.asList(failures))
                        : Collections.emptySet(),
                namedConsole("[mongo shell output]"));
        final RuntimeConfig runtimeConfig = Defaults.runtimeConfigFor(Command.Mongo)
                .processOutput(ProcessOutput.builder()
                        .output(mongoOutput)
                        .error(Processors.namedConsole("[mongo shell error]"))
                        .commands(Processors.console())
                        .build())
                .build();
        final MongoShellStarter starter = MongoShellStarter.getInstance(runtimeConfig);
        final File scriptFile = writeTmpScriptFile(scriptText);
        final ImmutableMongoShellConfig.Builder builder = MongoShellConfig.builder();
        if (!isEmpty(DocumentDbAbstractTestEnvironment.ADMIN_DATABASE)) {
            builder.dbName(DocumentDbAbstractTestEnvironment.ADMIN_DATABASE);
        }
        if (!isEmpty(username)) {
            builder.userName(username);
        }
        if (!isEmpty(password)) {
            builder.password(password);
        }
        starter.prepare(builder
                .scriptName(scriptFile.getAbsolutePath())
                .version(mongoConfig.version())
                .net(mongoConfig.net())
                .build()).start();
        mongoOutput.waitForResult(INIT_TIMEOUT_MS);
    }

    /**
     * Writes the script text to a self-deleting temporary {@code .js} file.
     *
     * @param scriptText the script contents.
     * @return the temporary script file.
     * @throws IOException if the file cannot be created or written.
     */
    private static File writeTmpScriptFile(final String scriptText) throws IOException {
        final File scriptFile = File.createTempFile("tempFile", ".js");
        scriptFile.deleteOnExit();
        // try-with-resources ensures the writer is closed even if write() throws.
        try (Writer writer = new OutputStreamWriter(
                        new FileOutputStream(scriptFile), StandardCharsets.UTF_8);
                BufferedWriter bw = new BufferedWriter(writer)) {
            bw.write(scriptText);
        }
        return scriptFile;
    }

    private static String getRandomPassword() {
        return UUID.randomUUID().toString().replaceAll("[-]", "");
    }

    /**
     * Watches the mongo or mongod output stream for a success token or any of a
     * set of failure tokens, and lets a caller block until one is seen.
     */
    private static class MongoLogWatchStreamProcessor extends LogWatchStreamProcessor {
        private final Object mutex = new Object();
        private final String success;
        private final Set<String> failures;
        private volatile boolean found = false;

        /**
         * Creates a new MongoLogWatchStreamProcessor
         *
         * @param success the string token to watch for to indicate success.
         * @param failures the set of strings to watch for to indicate failure.
         * @param destination the stream processor.
         */
        public MongoLogWatchStreamProcessor(
                final String success,
                final Set<String> failures,
                final StreamProcessor destination) {
            super(success, failures, destination);
            this.success = success;
            this.failures = failures;
        }

        @Override
        public void process(final String block) {
            if (containsSuccess(block) || containsFailure(block)) {
                synchronized (mutex) {
                    found = true;
                    mutex.notifyAll();
                }
            } else {
                super.process(block);
            }
        }

        private boolean containsSuccess(final String block) {
            return block.contains(success);
        }

        private boolean containsFailure(final String block) {
            for (String failure : failures) {
                if (block.contains(failure)) {
                    return true;
                }
            }
            return false;
        }

        /**
         * Waits for a result up to as long as the given timeout.
         *
         * <p>Fix: the previous implementation re-waited unconditionally in a
         * {@code while (!found)} loop, so it never actually timed out when no
         * token arrived. This version tracks a deadline (also guarding against
         * spurious wakeups) and returns once it passes. The interrupt status is
         * restored if the wait is interrupted.
         *
         * @param timeout the maximum time to wait, in milliseconds.
         */
        public void waitForResult(final long timeout) {
            final long deadline = System.currentTimeMillis() + timeout;
            synchronized (mutex) {
                try {
                    long remaining = timeout;
                    while (!found && remaining > 0) {
                        mutex.wait(remaining);
                        remaining = deadline - System.currentTimeMillis();
                    }
                } catch (InterruptedException e) {
                    // Preserve the interrupt for callers further up the stack.
                    Thread.currentThread().interrupt();
                }
            }
        }
    }
}
4,561
0
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common
Create_ds/amazon-documentdb-jdbc-driver/src/testFixtures/java/software/amazon/documentdb/jdbc/common/test/DocumentDbTestEnvironmentFactory.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.common.test;

import com.google.common.collect.ImmutableList;

import java.util.Arrays;

import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.isNullOrWhitespace;

/**
 * Creates test environments.
 *
 * Note: Ensure the values set in the gradle.build match these enumeration values exactly.
 */
public class DocumentDbTestEnvironmentFactory {

    // Lazily built; volatile + synchronized builder implements double-checked locking.
    private static volatile ImmutableList<DocumentDbTestEnvironment> configuredEnvironments = null;

    /**
     * Gets the MongoDB 4.0 test environment.
     *
     * @return the {@link DocumentDbTestEnvironment} for the MongoDB 4.0 server.
     */
    public static DocumentDbTestEnvironment getMongoDb40Environment() {
        return DocumentDbTestEnvironmentType.MONGODB40_FLAPDOODLE.getEnvironment();
    }

    /**
     * Gets the DocumentDB 4.0 via SSH tunnel test environment.
     *
     * @return the {@link DocumentDbTestEnvironment} for the DocumentDB 4.0 server.
     */
    public static DocumentDbTestEnvironment getDocumentDb40SshTunnelEnvironment() {
        return DocumentDbTestEnvironmentType.DOCUMENTDB40_SSH_TUNNEL.getEnvironment();
    }

    /**
     * Gets the list of configured test environments using their default settings.
     *
     * @return a list of {@link DocumentDbTestEnvironment} for all configured test environments.
     */
    public static ImmutableList<DocumentDbTestEnvironment> getConfiguredEnvironments() {
        if (configuredEnvironments == null) {
            buildConfiguredEnvironments();
        }
        return configuredEnvironments;
    }

    /** Builds the environment list exactly once from the configured names. */
    private static synchronized void buildConfiguredEnvironments() {
        if (configuredEnvironments != null) {
            return;
        }
        final ImmutableList.Builder<DocumentDbTestEnvironment> listBuilder =
                ImmutableList.builder();
        Arrays.stream(getEnvironmentNames().split(","))
                .distinct()
                .map(DocumentDbTestEnvironmentType::valueOf)
                .map(DocumentDbTestEnvironmentType::getEnvironment)
                .forEach(listBuilder::add);
        configuredEnvironments = listBuilder.build();
    }

    /**
     * Reads the comma-separated environment names from the CONFIGURED_ENVIRONMENTS
     * environment variable, defaulting to the FlapDoodle MongoDB 4.0 environment.
     */
    private static String getEnvironmentNames() {
        final String environmentNames = System.getenv("CONFIGURED_ENVIRONMENTS");
        return isNullOrWhitespace(environmentNames)
                ? DocumentDbTestEnvironmentType.MONGODB40_FLAPDOODLE.name()
                : environmentNames;
    }

    /** Maps each supported environment name to its singleton environment instance. */
    private enum DocumentDbTestEnvironmentType {
        MONGODB40_FLAPDOODLE(new DocumentDbMongoTestEnvironment()),
        DOCUMENTDB40_SSH_TUNNEL(new DocumentDbDocumentDbTestEnvironment());

        private final DocumentDbTestEnvironment environment;

        DocumentDbTestEnvironmentType(final DocumentDbTestEnvironment environment) {
            this.environment = environment;
        }

        DocumentDbTestEnvironment getEnvironment() {
            return environment;
        }
    }
}
4,562
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbDatabaseMetaData.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.google.common.annotations.VisibleForTesting; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.documentdb.jdbc.common.DatabaseMetaData; import software.amazon.documentdb.jdbc.common.utilities.JdbcType; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import software.amazon.documentdb.jdbc.common.utilities.SqlState; import software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaColumn; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable; import java.sql.Date; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.isNullOrWhitespace; import static software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildAttributesColumnMetaData; import static software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildCatalogsColumnMetaData; import static 
software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildColumnPrivilegesColumnMetaData; import static software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildColumnsColumnMetaData; import static software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildImportedKeysColumnMetaData; import static software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildPrimaryKeysColumnMetaData; import static software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildProceduresColumnMetaData; import static software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildSchemasColumnMetaData; import static software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildTableTypesColumnMetaData; import static software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildTablesColumnMetaData; import static software.amazon.documentdb.jdbc.DocumentDbDatabaseMetaDataResultSets.buildTypeInfoColumnMetaData; /** * DocumentDb implementation of DatabaseMetaData. 
*/ public class DocumentDbDatabaseMetaData extends DatabaseMetaData implements java.sql.DatabaseMetaData { private static final int BASE_10 = 10; private static final Map<JdbcType, Integer> TYPE_COLUMN_SIZE_MAP; private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbDatabaseMetaData.class); private static final char ESCAPE_CHAR = '\\'; private final DocumentDbDatabaseSchemaMetadata databaseMetadata; private final DocumentDbConnectionProperties properties; static { TYPE_COLUMN_SIZE_MAP = new HashMap<>(); for (JdbcType jdbcType : JdbcType.values()) { switch (jdbcType) { case DECIMAL: case NUMERIC: TYPE_COLUMN_SIZE_MAP.put(jdbcType, 646456995); // precision + "-.".length()} break; case FLOAT: case REAL: case DOUBLE: TYPE_COLUMN_SIZE_MAP.put(jdbcType, 23); // String.valueOf(-Double.MAX_VALUE).length(); break; case BIGINT: TYPE_COLUMN_SIZE_MAP.put(jdbcType, 20); // decimal precision + "-".length(); break; case INTEGER: TYPE_COLUMN_SIZE_MAP.put(jdbcType, 11); // decimal precision + "-".length(); break; case SMALLINT : TYPE_COLUMN_SIZE_MAP.put(jdbcType, 6); // decimal precision + "-".length(); break; case TINYINT : TYPE_COLUMN_SIZE_MAP.put(jdbcType, 4); break; case VARBINARY: case VARCHAR: case NVARCHAR: TYPE_COLUMN_SIZE_MAP.put(jdbcType, 65536); break; case DATE: TYPE_COLUMN_SIZE_MAP.put(jdbcType, new Date(Long.MAX_VALUE).toString().length()); break; case TIME: TYPE_COLUMN_SIZE_MAP.put(jdbcType, new Time(Long.MAX_VALUE).toString().length()); break; case TIMESTAMP: TYPE_COLUMN_SIZE_MAP.put(jdbcType, new Timestamp(Long.MAX_VALUE).toString().length()); break; default: TYPE_COLUMN_SIZE_MAP.put(jdbcType, 0); break; } } } /** * DocumentDbDatabaseMetaData constructor, initializes super class. * * @param connection the connection. * @param databaseMetadata the underlying database metadata. * @param properties the connection properties. 
*/ DocumentDbDatabaseMetaData( final DocumentDbConnection connection, final DocumentDbDatabaseSchemaMetadata databaseMetadata, final DocumentDbConnectionProperties properties) { super(connection); this.databaseMetadata = databaseMetadata; this.properties = properties; } // TODO: Go through and implement these functions @Override public String getURL() { return DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME + properties.buildSanitizedConnectionString(); } @Override public String getUserName() { return properties.getUser(); } @Override public String getDatabaseProductName() { return "DocumentDB"; } @Override public String getDatabaseProductVersion() { // TODO: Get this from underlying server. return "4.0"; } @Override public String getDriverName() { return "DocumentDB JDBC Driver"; } @Override public int getDriverMajorVersion() { return DocumentDbDriver.DRIVER_MAJOR_VERSION; } @Override public int getDriverMinorVersion() { return DocumentDbDriver.DRIVER_MINOR_VERSION; } @Override public String getDriverVersion() { return DocumentDbDriver.DRIVER_VERSION; } @Override public String getSQLKeywords() { return ""; } @Override public String getNumericFunctions() { return ""; } @Override public String getStringFunctions() { return ""; } @Override public String getSystemFunctions() { return ""; } @Override public String getTimeDateFunctions() { return ""; } @Override public String getSearchStringEscape() { return "\\"; } @Override public String getExtraNameCharacters() { return ""; } @Override public String getCatalogTerm() { return "catalog"; } @Override public String getCatalogSeparator() { return "."; } @Override public int getMaxRowSize() { return 0; // Indicate either no limit or unknown. 
} @Override public ResultSet getProcedures(final String catalog, final String schemaPattern, final String procedureNamePattern) { final List<List<Object>> metaData = new ArrayList<>(); return new DocumentDbListResultSet( null, buildProceduresColumnMetaData(properties.getDatabase()), metaData); } @Override public ResultSet getTables(final String catalog, final String schemaPattern, final String tableNamePattern, final String[] types) { final List<List<Object>> metaData = new ArrayList<>(); // ASSUMPTION: We're only supporting tables. if (isNullOrWhitespace(catalog) && (types == null || types.length == 0 || Arrays.stream(types) .anyMatch(s -> isNullOrWhitespace(s) || s.equals("TABLE")))) { if (schemaPattern == null || properties.getDatabase().matches(convertPatternToRegex(schemaPattern))) { addTablesForSchema(tableNamePattern, metaData); } } return new DocumentDbListResultSet( null, buildTablesColumnMetaData(properties.getDatabase()), metaData); } private void addTablesForSchema(final String tableNamePattern, final List<List<Object>> metaData) { final String regexTableNamePattern = convertPatternToRegex(tableNamePattern); for (String tableName : databaseMetadata.getTableSchemaMap().keySet()) { if (tableNamePattern == null || tableName.matches(regexTableNamePattern)) { addTableEntry(metaData, tableName); } } } private void addTableEntry(final List<List<Object>> metaData, final String tableName) { // 1. TABLE_CAT String => table catalog (may be null) // 2. TABLE_SCHEM String => table schema (may be null) // 3. TABLE_NAME String => table name // 4. TABLE_TYPE String => table type. Typical types are "TABLE", "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL TEMPORARY", "ALIAS", "SYNONYM". // 5. REMARKS String => explanatory comment on the table // 6. TYPE_CAT String => the types catalog (may be null) // 7. TYPE_SCHEM String => the types schema (may be null) // 8. TYPE_NAME String => type name (may be null) // 9. 
SELF_REFERENCING_COL_NAME String => name of the designated "identifier" column of a typed table (may be null) // 10. REF_GENERATION String => specifies how values in SELF_REFERENCING_COL_NAME are created. Values are "SYSTEM", "USER", "DERIVED". (may be null) final List<Object> row = new ArrayList<>(Arrays.asList( null, properties.getDatabase(), tableName, "TABLE", null, null, null, null, null, null)); metaData.add(row); } @Override public ResultSet getSchemas() { return getSchemas(null, null); } @Override public ResultSet getCatalogs() { final List<List<Object>> metaData = new ArrayList<>(); // 1. TABLE_CAT String => catalog name // Note: return NO records to indicate no catalogs. return new DocumentDbListResultSet( null, buildCatalogsColumnMetaData(properties.getDatabase()), metaData); } @Override public ResultSet getTableTypes() { final List<List<Object>> metaData = new ArrayList<>(); // ASSUMPTION: We're only supporting TABLE types. for (String tableType : Collections.singletonList("TABLE")) { // 1. TABLE_TYPE String => table type. Typical types are "TABLE", "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL TEMPORARY", "ALIAS", "SYNONYM". 
metaData.add(Collections.singletonList(tableType)); } return new DocumentDbListResultSet( null, buildTableTypesColumnMetaData(properties.getDatabase()), metaData); } @Override public ResultSet getColumns(final String catalog, final String schemaPattern, final String tableNamePattern, final String columnNamePattern) throws SQLException { final List<List<Object>> metaData = new ArrayList<>(); if (isNullOrWhitespace(catalog)) { if (schemaPattern == null || properties.getDatabase().matches(convertPatternToRegex(schemaPattern))) { addColumnsForSchema(tableNamePattern, columnNamePattern, metaData); } } return new DocumentDbListResultSet( null, buildColumnsColumnMetaData(properties.getDatabase()), metaData); } private void addColumnsForSchema(final String tableNamePattern, final String columnNamePattern, final List<List<Object>> metaData) throws SQLException { if ("%".equals(tableNamePattern)) { // Optimized to get the whole map at once. for (DocumentDbSchemaTable table : databaseMetadata.getTableSchemaMap().values()) { addColumnsForTable(columnNamePattern, metaData, table); } } else { final String regexTableNamePattern = convertPatternToRegex(tableNamePattern); for (String tableName : databaseMetadata.getTableSchemaMap().keySet()) { if (tableName.matches(regexTableNamePattern)) { final DocumentDbSchemaTable table = databaseMetadata .getTableSchemaMap().get(tableName); if (table == null) { // This will occur if the table schema is deleted after retrieving the // database schema. 
throw SqlError.createSQLException( LOGGER, SqlState.DATA_EXCEPTION, SqlError.INCONSISTENT_SCHEMA, tableName); } addColumnsForTable(columnNamePattern, metaData, table); } } } } private void addColumnsForTable(final String columnNamePattern, final List<List<Object>> metaData, final DocumentDbSchemaTable table) { final String regexColumnPattern = convertPatternToRegex(columnNamePattern); for (DocumentDbSchemaColumn column : table.getColumnMap().values()) { if (columnNamePattern == null || column.getSqlName().matches(regexColumnPattern)) { addColumnEntry(metaData, table, column); } } } /** * Warning: if any entry becomes used/unused, reflection of that change in the * ODBC driver is required as well. Files to be changed in the ODBC driver: * function ColumnMetadataQuery::GetColumn in namespace query * in column_metadata_query.cpp */ private void addColumnEntry(final List<List<Object>> metaData, final DocumentDbSchemaTable table, final DocumentDbSchemaColumn column) { // 1. TABLE_CAT String => table catalog (may be null) // 2. TABLE_SCHEM String => table schema (may be null) // 3. TABLE_NAME String => table name // 4. COLUMN_NAME String => column name // 5. DATA_TYPE int => SQL type from java.sql.Types // 6. TYPE_NAME String => Data source dependent type name, for a UDT the type name is fully qualified // 7. COLUMN_SIZE int => column size. // 8. BUFFER_LENGTH is not used. // 9. DECIMAL_DIGITS int => the number of fractional digits. Null is returned for data types where DECIMAL_DIGITS is not applicable. // 10. NUM_PREC_RADIX int => Radix (typically either 10 or 2) // 11. NULLABLE int => is NULL allowed. // columnNoNulls - might not allow NULL values // columnNullable - definitely allows NULL values // columnNullableUnknown - nullability unknown // 12. REMARKS String => comment describing column (may be null) // 13. COLUMN_DEF String => default value for the column, which should be interpreted as a string when the value is enclosed in single quotes (may be null) // 14. 
SQL_DATA_TYPE int => unused // 15. SQL_DATETIME_SUB int => unused // 16. CHAR_OCTET_LENGTH int => for char types the maximum number of bytes in the column // 17. ORDINAL_POSITION int => index of column in table (starting at 1) // 18. IS_NULLABLE String => ISO rules are used to determine the nullability for a column. // YES --- if the column can include NULLs // NO --- if the column cannot include NULLs // empty string --- if the nullability for the column is unknown // 19. SCOPE_CATALOG String => catalog of table that is the scope of a reference attribute (null if DATA_TYPE isn't REF) // 20. SCOPE_SCHEMA String => schema of table that is the scope of a reference attribute (null if the DATA_TYPE isn't REF) // 21. SCOPE_TABLE String => table name that this the scope of a reference attribute (null if the DATA_TYPE isn't REF) // 22. SOURCE_DATA_TYPE short => source type of a distinct type or user-generated Ref type, SQL type from java.sql.Types (null if DATA_TYPE isn't DISTINCT or user-generated REF) // 23. IS_AUTOINCREMENT String => Indicates whether this column is auto incremented // YES --- if the column is auto incremented // NO --- if the column is not auto incremented // empty string --- if it cannot be determined whether the column is auto incremented // 24. 
IS_GENERATEDCOLUMN String => Indicates whether this is a generated column // YES --- if this a generated column // NO --- if this not a generated column // empty string --- if it cannot be determined whether this is a generated column if (column.getSqlType() == JdbcType.JAVA_OBJECT || column.getSqlType() == JdbcType.ARRAY) { return; } final List<Object> row = new ArrayList<>(Arrays.asList( null, // TABLE_CAT properties.getDatabase(), // TABLE_SCHEM table.getSqlName(), // TABLE_NAME column.getSqlName(), // COLUMN_NAME column.getSqlType().getJdbcType(), //DATA_TYPE column.getSqlType().name(), // TYPE_NAME TYPE_COLUMN_SIZE_MAP.get(column.getSqlType()), // COLUMN_SIZE null, // DECIMAL_DIGITS null, // NUM_PREC_RADIX null, // BUFFER_LENGTH column.isPrimaryKey()// NULLABLE ? ResultSetMetaData.columnNoNulls : ResultSetMetaData.columnNullable, null, // REMARKS null, // COLUMN_DEF null, // SQL_DATA_TYPE null, // SQL_DATETIME_SUB getCharOctetLength(column), // CHAR_OCTET_LENGTH column.getIndex(table).orElse(null), // ORDINAL_POSITION (one-based) column.isPrimaryKey() ? "NO" : "YES", // IS_NULLABLE null, // SCOPE_CATALOG null, // SCOPE_SCHEMA null, // SCOPE_TABLE null, // SOURCE_DATA_TYPE "NO", // IS_AUTOINCREMENT column.isIndex() ? "YES" : "NO" // IS_GENERATEDCOLUMN )); metaData.add(row); } private static Integer getCharOctetLength(final DocumentDbSchemaColumn column) { switch (column.getSqlType()) { case CHAR: case NCHAR: case VARCHAR: case NVARCHAR: case LONGVARCHAR: case LONGNVARCHAR: return TYPE_COLUMN_SIZE_MAP.get(column.getSqlType()) * 4; case BINARY: case VARBINARY: case LONGVARBINARY: return TYPE_COLUMN_SIZE_MAP.get(column.getSqlType()); default: return null; } } @Override public ResultSet getColumnPrivileges(final String catalog, final String schema, final String table, final String columnNamePattern) { // 1. TABLE_CAT String => table catalog (may be null) // 2. TABLE_SCHEM String => table schema (may be null) // 3. TABLE_NAME String => table name // 4. 
COLUMN_NAME String => column name // 5. GRANTOR String => grantor of access (may be null) // 6. GRANTEE String => grantee of access // 7. PRIVILEGE String => name of access (SELECT, INSERT, UPDATE, REFRENCES, ...) // 8. IS_GRANTABLE String => "YES" if grantee is permitted to grant to others; "NO" if not; null if unknown final List<List<Object>> metaData = new ArrayList<>(); return new DocumentDbListResultSet( null, buildColumnPrivilegesColumnMetaData(properties.getDatabase()), metaData); } @Override public ResultSet getBestRowIdentifier(final String catalog, final String schema, final String table, final int scope, final boolean nullable) throws SQLException { // TODO: Implement throw new SQLFeatureNotSupportedException(); } @Override public ResultSet getPrimaryKeys(final String catalog, final String schema, final String table) throws SQLException { // 1. TABLE_CAT String => table catalog (may be null) // 2. TABLE_SCHEM String => table schema (may be null) // 3. TABLE_NAME String => table name // 4. COLUMN_NAME String => column name // 5. KEY_SEQ short => sequence number within primary key( a value of 1 represents the first column of the primary key, a value of 2 would represent the second column within the primary key). // 6. PK_NAME String => primary key name (may be null) final List<List<Object>> metaData = new ArrayList<>(); if (schema == null || properties.getDatabase().equals(schema)) { for (String tableName : databaseMetadata.getTableSchemaMap().keySet()) { if (table == null || tableName.equals(table)) { final DocumentDbSchemaTable metadataTable = databaseMetadata .getTableSchemaMap().get(tableName); if (metadataTable == null) { // This will occur if the table schema is deleted after retrieving the // database schema. throw SqlError.createSQLException( LOGGER, SqlState.DATA_EXCEPTION, SqlError.INCONSISTENT_SCHEMA, tableName); } for (DocumentDbSchemaColumn column : metadataTable.getColumnMap().values()) { // 1. 
TABLE_CAT String => table catalog (may be null) // 2. TABLE_SCHEM String => table schema (may be null) // 3. TABLE_NAME String => table name // 4. COLUMN_NAME String => column name // 5. KEY_SEQ short => sequence number within primary key // (a value of 1 represents the first column of the primary key, a // value of 2 would represent the second column within the primary key). // 6. PK_NAME String => primary key name (may be null) if (column.isPrimaryKey()) { final List<Object> row = new ArrayList<>(Arrays.asList( null, // TABLE_CAT properties.getDatabase(), // TABLE_SCHEM metadataTable.getSqlName(), // TABLE_NAME column.getSqlName(), // COLUMN_NAME column.getPrimaryKeyIndex(metadataTable).orElse(0), // KEY_SEQ null // PK_NAME )); metaData.add(row); } } } } } return new DocumentDbListResultSet( null, buildPrimaryKeysColumnMetaData(properties.getDatabase()), metaData); } @Override public ResultSet getImportedKeys(final String catalog, final String schema, final String table) throws SQLException { final List<List<Object>> metaData = new ArrayList<>(); if (isNullOrWhitespace(catalog)) { if (schema == null || properties.getDatabase().equals(schema)) { addImportedKeysForSchema(table, metaData); } } return new DocumentDbListResultSet( null, buildImportedKeysColumnMetaData(properties.getDatabase()), metaData); } private void addImportedKeysForSchema(final String table, final List<List<Object>> metaData) throws SQLException { final String regexTablePattern = convertPatternToRegex(table); for (String tableName : databaseMetadata.getTableSchemaMap().keySet()) { if (table == null || tableName.matches(regexTablePattern)) { final DocumentDbSchemaTable schemaTable = databaseMetadata .getTableSchemaMap().get(tableName); if (schemaTable == null) { // This will occur if the table schema is deleted after retrieving the // database schema. 
throw SqlError.createSQLException( LOGGER, SqlState.DATA_EXCEPTION, SqlError.INCONSISTENT_SCHEMA, tableName); } addImportedKeysForTable(metaData, schemaTable, schemaTable); } } } private void addImportedKeysForTable(final List<List<Object>> metaData, final DocumentDbSchemaTable schemaTable, final DocumentDbSchemaTable metadataTable) { for (DocumentDbSchemaColumn column : metadataTable.getColumnMap().values()) { addImportedKey(metaData, schemaTable, column); } } private void addImportedKey(final List<List<Object>> metaData, final DocumentDbSchemaTable schemaTable, final DocumentDbSchemaColumn column) { // 1. PKTABLE_CAT String => primary key table catalog being imported (may be null) // 2. PKTABLE_SCHEM String => primary key table schema being imported (may be null) // 3. PKTABLE_NAME String => primary key table name being imported // 4. PKCOLUMN_NAME String => primary key column name being imported // 5. FKTABLE_CAT String => foreign key table catalog (may be null) // 6. FKTABLE_SCHEM String => foreign key table schema (may be null) // 7. FKTABLE_NAME String => foreign key table name // 8. FKCOLUMN_NAME String => foreign key column name // 9. KEY_SEQ short => sequence number within a foreign key // (a value of 1 represents the first column of the foreign key, a value of 2 would represent // the second column within the foreign key). // 10. UPDATE_RULE short => What happens to a foreign key when the primary key is updated: // importedNoAction - do not allow update of primary key if it has been imported // importedKeyCascade - change imported key to agree with primary key update // importedKeySetNull - change imported key to NULL if its primary key has been updated // importedKeySetDefault - change imported key to default values if its primary key has been updated // importedKeyRestrict - same as importedKeyNoAction (for ODBC 2.x compatibility) // 11. DELETE_RULE short => What happens to the foreign key when primary is deleted. 
// importedKeyNoAction - do not allow delete of primary key if it has been imported // importedKeyCascade - delete rows that import a deleted key // importedKeySetNull - change imported key to NULL if its primary key has been deleted // importedKeyRestrict - same as importedKeyNoAction (for ODBC 2.x compatibility) // importedKeySetDefault - change imported key to default if its primary key has been deleted // 12. FK_NAME String => foreign key name (may be null) // 13. PK_NAME String => primary key name (may be null) // 14. DEFERRABILITY short => can the evaluation of foreign key constraints be deferred until commit // importedKeyInitiallyDeferred - see SQL92 for definition // importedKeyInitiallyImmediate - see SQL92 for definition // importedKeyNotDeferrable - see SQL92 for definition if (column.getForeignKeyTableName() != null && schemaTable != null) { // ASSUMPTION: This can only be done because we only reference // the base table in a foreign key relationship. final List<Object> row = new ArrayList<>(Arrays.asList( null, // PKTABLE_CAT properties.getDatabase(), // PKTABLE_SCHEM column.getForeignKeyTableName(), // PKTABLE_NAME column.getForeignKeyColumnName(), // PKCOLUMN_NAME null, // FKTABLE_CAT properties.getDatabase(), // FKTABLE_SCHEM schemaTable.getSqlName(), // FKTABLE_NAME column.getSqlName(), // FKCOLUMN_NAME metaData.size() + 1, // KEY_SEQ importedKeyNoAction, // UPDATE_RULE importedKeyNoAction, // DELETE_RULE null, // FK_NAME null, // PK_NAME importedKeyInitiallyDeferred // DEFERRABILITY )); metaData.add(row); } } @Override public ResultSet getTypeInfo() { return new DocumentDbListResultSet( null, buildTypeInfoColumnMetaData(), new ArrayList<>(Arrays.asList( new ArrayList<>(Arrays.asList( JdbcType.BOOLEAN.name(), // TYPE_NAME JdbcType.BOOLEAN.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.BOOLEAN), // PRECISION null, // LITERAL_PREFIX null, // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // 
CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.TINYINT.name(), // TYPE_NAME Types.TINYINT, // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.TINYINT), // PRECISION null, // LITERAL_PREFIX null, // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE false, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.SMALLINT.name(), // TYPE_NAME JdbcType.SMALLINT.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.SMALLINT), // PRECISION null, // LITERAL_PREFIX null, // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE false, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.INTEGER.name(), // TYPE_NAME JdbcType.INTEGER.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.INTEGER), // PRECISION null, // LITERAL_PREFIX null, // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE false, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // 
MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.BIGINT.name(), // TYPE_NAME JdbcType.BIGINT.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.BIGINT), // PRECISION null, // LITERAL_PREFIX null, // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE false, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.REAL.name(), // TYPE_NAME JdbcType.REAL.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.REAL), // PRECISION null, // LITERAL_PREFIX null, // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE false, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.FLOAT.name(), // TYPE_NAME JdbcType.FLOAT.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.FLOAT), // PRECISION null, // LITERAL_PREFIX null, // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE false, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.DOUBLE.name(), // TYPE_NAME 
JdbcType.DOUBLE.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.DOUBLE), // PRECISION null, // LITERAL_PREFIX null, // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE false, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.DECIMAL.name(), // TYPE_NAME JdbcType.DECIMAL.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.DECIMAL), // PRECISION null, // LITERAL_PREFIX null, // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE false, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.CHAR.name(), // TYPE_NAME JdbcType.CHAR.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.CHAR), // PRECISION "'", // LITERAL_PREFIX "'", // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE true, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.VARCHAR.name(), // TYPE_NAME JdbcType.VARCHAR.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.VARCHAR), // PRECISION "'", // LITERAL_PREFIX "'", // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, 
// NULLABLE true, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.NCHAR.name(), // TYPE_NAME JdbcType.NCHAR.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.NCHAR), // PRECISION "'", // LITERAL_PREFIX "'", // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE true, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.NVARCHAR.name(), // TYPE_NAME JdbcType.NVARCHAR.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.NVARCHAR), // PRECISION "'", // LITERAL_PREFIX "'", // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE true, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.BINARY.name(), // TYPE_NAME JdbcType.BINARY.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.BINARY), // PRECISION "x'", // LITERAL_PREFIX "'", // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // 
MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.VARBINARY.name(), // TYPE_NAME JdbcType.VARBINARY.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.VARBINARY), // PRECISION "x'", // LITERAL_PREFIX "'", // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.DATE.name(), // TYPE_NAME JdbcType.DATE.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.DATE), // PRECISION "DATE '", // LITERAL_PREFIX "'", // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.TIME.name(), // TYPE_NAME JdbcType.TIME.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.TIME), // PRECISION "TIME '", // LITERAL_PREFIX "'", // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.TIMESTAMP.name(), // 
TYPE_NAME JdbcType.TIMESTAMP.getJdbcType(), // DATA_TYPE TYPE_COLUMN_SIZE_MAP.get(JdbcType.TIMESTAMP), // PRECISION "TIMESTAMP '", // LITERAL_PREFIX "'", // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )), new ArrayList<>(Arrays.asList( JdbcType.NULL.name(), // TYPE_NAME JdbcType.NULL.getJdbcType(), // DATA_TYPE null, // PRECISION null, // LITERAL_PREFIX null, // LITERAL_SUFFIX null, // CREATE_PARAMS ResultSetMetaData.columnNullable, // NULLABLE false, // CASE_SENSITIVE DatabaseMetaData.typeSearchable, // SEARCHABLE true, // UNSIGNED_ATTRIBUTE false, // FIXED_PREC_SCALE false, // AUTO_INCREMENT null, // LOCAL_TYPE_NAME 0, // MINIMUM_SCALE 0, // MAXIMUM_SCALE null, // SQL_DATA_TYPE (unused) null, // SQL_DATETIME_SUB (unused) BASE_10 // NUM_PREC_RADIX )) )) ); } @Override public ResultSet getIndexInfo(final String catalog, final String schema, final String table, final boolean unique, final boolean approximate) throws SQLException { throw new SQLFeatureNotSupportedException(); } @Override public ResultSet getAttributes(final String catalog, final String schemaPattern, final String typeNamePattern, final String attributeNamePattern) { // 1. TYPE_CAT String => type catalog (may be null) // 2. TYPE_SCHEM String => type schema (may be null) // 3. TYPE_NAME String => type name // 4. ATTR_NAME String => attribute name // 5. DATA_TYPE int => attribute type SQL type from java.sql.Types // 6. ATTR_TYPE_NAME String => Data source dependent type name. For a UDT, the type name is fully qualified. For a REF, the type name is fully qualified and represents the target type of the reference type. // 7. ATTR_SIZE int => column size. 
For char or date types this is the maximum number of characters; for numeric or decimal types this is precision. // 8. DECIMAL_DIGITS int => the number of fractional digits. Null is returned for data types where DECIMAL_DIGITS is not applicable. // 9. NUM_PREC_RADIX int => Radix (typically either 10 or 2) // 10. NULLABLE int => whether NULL is allowed // attributeNoNulls - might not allow NULL values // attributeNullable - definitely allows NULL values // attributeNullableUnknown - nullability unknown // 11. REMARKS String => comment describing column (may be null) // 12. ATTR_DEF String => default value (may be null) // 13. SQL_DATA_TYPE int => unused // 14. SQL_DATETIME_SUB int => unused // 15. CHAR_OCTET_LENGTH int => for char types the maximum number of bytes in the column // 16. ORDINAL_POSITION int => index of the attribute in the UDT (starting at 1) // 17. IS_NULLABLE String => ISO rules are used to determine the nullability for a attribute. // YES --- if the attribute can include NULLs // NO --- if the attribute cannot include NULLs // empty string --- if the nullability for the attribute is unknown // 18. SCOPE_CATALOG String => catalog of table that is the scope of a reference attribute (null if DATA_TYPE isn't REF) // 19. SCOPE_SCHEMA String => schema of table that is the scope of a reference attribute (null if DATA_TYPE isn't REF) // 20. SCOPE_TABLE String => table name that is the scope of a reference attribute (null if the DATA_TYPE isn't REF) // 21. 
SOURCE_DATA_TYPE short => source type of a distinct type or user-generated Ref type,SQL type from java.sql.Types (null if DATA_TYPE isn't DISTINCT or user-generated REF) final List<List<Object>> metaData = new ArrayList<>(); return new DocumentDbListResultSet( null, buildAttributesColumnMetaData(properties.getDatabase()), metaData); } @Override public int getDatabaseMajorVersion() { // TODO: Implement return 4; } @Override public int getDatabaseMinorVersion() { // TODO: Implement return 0; } @Override public int getJDBCMajorVersion() { return 4; } @Override public int getJDBCMinorVersion() { return 2; } @Override public ResultSet getSchemas(final String catalog, final String schemaPattern) { final List<List<Object>> metaData = new ArrayList<>(); // 1. TABLE_SCHEM String => schema name // 2. TABLE_CATALOG String => catalog name (may be null) if (isNullOrWhitespace(catalog)) { if (isNullOrWhitespace(schemaPattern) || properties.getDatabase().matches(convertPatternToRegex(schemaPattern))) { final List<Object> row = new ArrayList<>( Arrays.asList(properties.getDatabase(), null)); metaData.add(row); } } return new DocumentDbListResultSet( null, buildSchemasColumnMetaData(properties.getDatabase()), metaData); } @Override public ResultSet getClientInfoProperties() throws SQLException { // TODO: Implement throw new SQLFeatureNotSupportedException(); } @Override public boolean supportsFullOuterJoins() { return false; } @Override public boolean nullsAreSortedLow() { return true; } @Override public int getMaxTablesInSelect() { return 0; } /** * Expects a string with zero-or more occurrences of '%' and '_' in the pattern. * Here we're converting the SQL-type pattern to a Regex pattern. * * @param pattern the SQL-type pattern to convert. 
* * @return the pattern converted to a Regex pattern */ @VisibleForTesting static String convertPatternToRegex(final String pattern) { if (isNullOrWhitespace(pattern)) { return ""; } final StringBuilder converted = new StringBuilder(); boolean escapeFound = false; int start = 0; for (int index = 0; index < pattern.length(); index++) { final char currChar = pattern.charAt(index); if (currChar == ESCAPE_CHAR) { if (escapeFound) { // I.e., \\ - two backslash start = updateRegexExpression(index - 1, start, pattern, "[\\]", converted) + 1; } escapeFound = !escapeFound; } else if (escapeFound) { start = updateRegexExpression(index - 1, start, pattern, "[" + currChar + "]", converted) + 1; escapeFound = false; } else if (currChar == '_') { start = updateRegexExpression(index, start, pattern, ".", converted); } else if (currChar == '%') { start = updateRegexExpression(index, start, pattern, ".*", converted); } } // Handle the trailing string. if (pattern.length() - start > 0) { converted.append(Pattern.quote(pattern.substring(start))); } return converted.toString(); } private static int updateRegexExpression(final int index, final int start, final String pattern, final String str, final StringBuilder converted) { if (index - start > 0) { converted.append(Pattern.quote(pattern.substring(start, index))); } converted.append(str); return index + 1; } }
4,563
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbConnectionProperty.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import org.checkerframework.checker.nullness.qual.NonNull; import software.amazon.documentdb.jdbc.common.utilities.ConnectionProperty; import java.util.Arrays; /** * The enumeration of connection properties. */ public enum DocumentDbConnectionProperty implements ConnectionProperty { USER("user", "", "User name used for SCRAM-based authentication."), PASSWORD("password","", "Password used for SCRAM-based authentication"), HOSTNAME("host", "", "The host name or IP address of the DocumentDB server or cluster."), DATABASE("database", "", "The name of the database to connect to in DocumentDB."), READ_PREFERENCE("readPreference", "primary", "The read preference"), APPLICATION_NAME(ConnectionProperty.APPLICATION_NAME, DocumentDbConnectionProperties.DEFAULT_APPLICATION_NAME, "Sets the logical name of the application. The application name may be used by the client to identify the application to the server, for use in server logs, slow query logs, and profile collection."), REPLICA_SET("replicaSet", "rs0", "Implies that the hosts given are a seed list, and the driver will attempt to find all members of the set."), TLS_ENABLED("tls", "true", "Whether to connect using TLS"), TLS_ALLOW_INVALID_HOSTNAMES("tlsAllowInvalidHostnames", "false", "Whether to allow invalid host names for TLS connections. 
Equivalent to tlsInsecure."), LOGIN_TIMEOUT_SEC("loginTimeoutSec", "10", "How long a connection can take to be opened before timing out (in seconds)."), RETRY_READS_ENABLED("retryReads", "true", "If true the driver will retry supported read operations if they fail due to a network error. Defaults to true."), METADATA_SCAN_METHOD("scanMethod", "random", "Method of scanning for metadata."), METADATA_SCAN_LIMIT("scanLimit", "1000", "Number of records to scan for metadata"), TLS_CA_FILE("tlsCAFile", "", "The path to the Certificate Authority (CA) '.pem' file."), SCHEMA_NAME("schemaName", "_default", "The name of the stored schema to use."), SSH_USER("sshUser", "", "The user name for the SSH tunnel."), SSH_HOSTNAME("sshHost", "", "The host name for the SSH tunnel. Optionally the SSH tunnel port number can be " + "provided using the syntax '<ssh-host>:<port>'. The default port is '22'."), SSH_PRIVATE_KEY_FILE("sshPrivateKeyFile", "", "The path to the private key file for the SSH tunnel."), SSH_PRIVATE_KEY_PASSPHRASE("sshPrivateKeyPassphrase", "", "If the SSH tunnel private key file is passphrase protected, " + "provide the passphrase using this option."), SSH_STRICT_HOST_KEY_CHECKING("sshStrictHostKeyChecking", "true", "If true, the 'known_hosts' file is checked to ensure the target host is trusted when creating the SSH tunnel. If false, the target host is not checked. Default is 'false'."), SSH_KNOWN_HOSTS_FILE("sshKnownHostsFile", "", "The path to the 'known_hosts' file used for checking the target host for the SSH tunnel when option 'sshStrictHostKeyChecking' is 'true'. Default is '~/.ssh/known_hosts'."), DEFAULT_FETCH_SIZE("defaultFetchSize", String.valueOf(DocumentDbConnectionProperties.FETCH_SIZE_DEFAULT), "The default fetch size (in records) when retrieving results from Amazon DocumentDB. It is the number of records to retrieve in a single batch. The maximum number of records retrieved in a single batch may also be limited by the overall memory size of the result. 
The value can be changed by calling the `Statement.setFetchSize` JDBC method. Default is '2000'."), REFRESH_SCHEMA("refreshSchema", "false", "Refreshes any existing schema with a newly generated schema when the connection first requires the schema. Note that this will remove any existing schema customizations and will reduce performance for the first query or metadata inquiry."), DEFAULT_AUTH_DB("defaultAuthDb", "admin", "The default authentication database to use."), ALLOW_DISK_USE("allowDiskUse", "default", "Indicator of whether to enable or disable the 'allow disk use' option on all queries. Valid values are 'enable', 'disable' or 'default'. Default is 'default'."), ; // Unsupported MongoDB connection properties that will be ignored but should have warnings. private static final String[] UNSUPPORTED_MONGO_DB_PROPERTIES = { "authMechanism", "authMechanismProperties", "authSource", "gssapiServiceName", "serverSelectionTimeoutMS", "serverSelectionTryOnce", "localThresholdMS", "heartbeatFrequencyMS", "ssl", "sslInvalidHostnamesAllowed", "sslAllowInvalidCertificates", "sslPEMKeyFile", "sslPEMKeyPassword", "sslCAFile", "tlsInsecure", "tlsCertificateKeyFile", "tlsCertificateKeyFilePassword", "tlsAllowInvalidCertificates", "connectTimeoutMS", "socketTimeoutMS", "maxIdleTimeMS", "maxLifeTimeMS", "maxPoolSize", "minPoolSize", "waitQueueMultiple", "waitQueueTimeoutMS", "safe", "journal", "w", "retryWrites", "wtimeoutMS", "readPreferenceTags", "readConcernLevel", "maxStalenessSeconds", "compressors", "zlibCompressionLevel", "uuidRepresentation" }; private final String connectionProperty; private final String defaultValue; private final String description; /** * DocumentDbConnectionProperty constructor. * * @param connectionProperty String representing the connection property. * @param defaultValue String representing the default value of the property. * @param description Description of the property. 
     */
    DocumentDbConnectionProperty(
            final @NonNull String connectionProperty,
            final @NonNull String defaultValue,
            final @NonNull String description) {
        this.connectionProperty = connectionProperty;
        this.defaultValue = defaultValue;
        this.description = description;
    }

    /**
     * Gets connection property.
     *
     * @return the connection property.
     */
    public @NonNull String getName() {
        return connectionProperty;
    }

    /**
     * Gets the default value of the connection property.
     *
     * @return the default value of the connection property.
     */
    public @NonNull String getDefaultValue() {
        return defaultValue;
    }

    /**
     * Gets description.
     *
     * @return the description.
     */
    public String getDescription() {
        return description;
    }

    /**
     * Check if the property is supported by the driver.
     *
     * @param name The name of the property.
     * @return {@code true} if property is supported; {@code false} otherwise.
     */
    public static boolean isSupportedProperty(final String name) {
        return Arrays
                .stream(DocumentDbConnectionProperty.values())
                .anyMatch(value -> value.getName().equals(name));
    }

    /**
     * Check if the property is unsupported by the driver but still a valid MongoDB option.
     *
     * @param name The name of the property.
     * @return {@code true} if property is valid but unsupported; {@code false} otherwise.
     */
    public static boolean isUnsupportedMongoDBProperty(final String name) {
        return Arrays.asList(UNSUPPORTED_MONGO_DB_PROPERTIES).contains(name);
    }

    /**
     * Gets the {@link DocumentDbConnectionProperty} whose name matches the given key.
     *
     * @param key the connection property name to match (case-sensitive).
     * @return the matching property, or {@code null} when no property has that name.
     */
    static DocumentDbConnectionProperty getPropertyFromKey(final String key) {
        // Linear scan over the enum values; returns the first (only) name match.
        for (DocumentDbConnectionProperty connectionProperty: DocumentDbConnectionProperty.values()) {
            if (connectionProperty.getName().equals(key)) {
                return connectionProperty;
            }
        }
        return null;
    }
}
4,564
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbResultSetMetaData.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.google.common.collect.ImmutableList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.documentdb.jdbc.common.ResultSetMetaData; import software.amazon.documentdb.jdbc.common.utilities.JdbcColumnMetaData; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import software.amazon.documentdb.jdbc.common.utilities.SqlState; import java.sql.SQLException; /** * DocumentDb implementation of ResultSetMetadata. 
*/ public class DocumentDbResultSetMetaData extends ResultSetMetaData implements java.sql.ResultSetMetaData { private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbResultSetMetaData.class); private final ImmutableList<JdbcColumnMetaData> columnMetaData; private final int columnCount; DocumentDbResultSetMetaData(final ImmutableList<JdbcColumnMetaData> columnMetaData) { this.columnMetaData = columnMetaData; this.columnCount = columnMetaData.size(); } private void verifyColumnIndex(final int columnIndex) throws SQLException { if (columnIndex < 1 || columnIndex > columnCount) { throw SqlError.createSQLException(LOGGER, SqlState.DATA_EXCEPTION, SqlError.INVALID_INDEX, columnIndex, columnCount); } } @Override public int getColumnCount() { return columnCount; } @Override public boolean isAutoIncrement(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).isAutoIncrement(); } @Override public boolean isCaseSensitive(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).isCaseSensitive(); } @Override public boolean isSearchable(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).isSearchable(); } @Override public boolean isCurrency(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).isCurrency(); } @Override public int isNullable(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).getNullable(); } @Override public boolean isSigned(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).isSigned(); } @Override public int getColumnDisplaySize(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).getColumnDisplaySize(); } @Override public String getColumnLabel(final int column) throws SQLException { 
verifyColumnIndex(column); return columnMetaData.get(column - 1).getColumnLabel(); } @Override public String getColumnName(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).getColumnName(); } @Override public String getSchemaName(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).getSchemaName(); } @Override public int getPrecision(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).getPrecision(); } @Override public int getScale(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).getScale(); } @Override public String getTableName(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).getTableName(); } @Override public String getCatalogName(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).getCatalogName(); } @Override public int getColumnType(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).getColumnType(); } @Override public String getColumnTypeName(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).getColumnTypeName(); } @Override public boolean isReadOnly(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).isReadOnly(); } @Override public boolean isWritable(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).isWritable(); } @Override public boolean isDefinitelyWritable(final int column) throws SQLException { verifyColumnIndex(column); return columnMetaData.get(column - 1).isDefinitelyWritable(); } @Override public String getColumnClassName(final int column) throws SQLException { verifyColumnIndex(column); return 
columnMetaData.get(column - 1).getColumnClassName(); } }
4,565
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbMetadataScanMethod.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc;

/**
 * The enumeration of methods to scan for metadata.
 */
public enum DocumentDbMetadataScanMethod {
    ID_FORWARD("idForward"),
    ID_REVERSE("idReverse"),
    ALL("all"),
    RANDOM("random");

    // The external (connection-string) name of the scan method.
    private final String name;

    DocumentDbMetadataScanMethod(final String name) {
        this.name = name;
    }

    /**
     * Gets the external name of this scan method.
     *
     * @return the scan method name as used in connection properties.
     */
    public String getName() {
        return name;
    }

    /**
     * Returns DocumentDbScanMethod with a name that matches input string.
     *
     * @param scanMethodString name of the scan method
     * @return DocumentDbScanMethod of string.
     * @throws IllegalArgumentException if no scan method matches the given name.
     */
    public static DocumentDbMetadataScanMethod fromString(final String scanMethodString) {
        for (DocumentDbMetadataScanMethod scanMethod : DocumentDbMetadataScanMethod.values()) {
            if (scanMethod.name.equals(scanMethodString)) {
                return scanMethod;
            }
        }
        // Include the offending value so configuration errors are easy to diagnose.
        throw new IllegalArgumentException("Invalid scan method: '" + scanMethodString + "'");
    }
}
4,566
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbDataSource.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc;

import com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.DataSource;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import javax.sql.PooledConnection;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

/**
 * DocumentDb implementation of DataSource.
 */
public class DocumentDbDataSource extends DataSource {
    private final DocumentDbConnectionProperties properties = new DocumentDbConnectionProperties();
    // Fix: the logger was previously created for DocumentDbDriver (copy-paste error), which
    // mis-attributed this class's log output under class-name-keyed logging configurations.
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbDataSource.class.getName());

    @Override
    public java.sql.Connection getConnection() throws SQLException {
        properties.validateRequiredProperties();
        return DriverManager.getConnection(DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties);
    }

    @Override
    public Connection getConnection(final String username, final String password) throws SQLException {
        setUser(username);
        setPassword(password);
        properties.validateRequiredProperties();
        return DriverManager.getConnection(DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME, properties);
    }

    /**
     * Sets the timeout for opening a connection.
     *
     * @param seconds The connection timeout in seconds.
     *
     * @throws SQLException if timeout is negative.
     */
    @Override
    public void setLoginTimeout(final int seconds) throws SQLException {
        if (seconds < 0) {
            throwInvalidTimeoutException(seconds);
        }
        properties.setLoginTimeout(String.valueOf(seconds));
    }

    /**
     * Gets the timeout for opening a connection.
     *
     * @return the connection timeout in seconds.
     */
    @Override
    public int getLoginTimeout() {
        return properties.getLoginTimeout();
    }

    @Override
    public PooledConnection getPooledConnection() throws SQLException {
        return new DocumentDbPooledConnection(getConnection());
    }

    @Override
    public PooledConnection getPooledConnection(final String user, final String password)
            throws SQLException {
        return new DocumentDbPooledConnection(getConnection(user, password));
    }

    /**
     * Sets the username for connection to DocumentDb.
     *
     * @param username The username to authenticate with.
     */
    public void setUser(final String username) {
        properties.setUser(username);
    }

    /**
     * Gets the username.
     *
     * @return The username to authenticate with.
     */
    public String getUser() {
        return properties.getUser();
    }

    /**
     * Sets the password for connection to DocumentDb.
     *
     * @param password The password to authenticate with.
     */
    public void setPassword(final String password) {
        properties.setPassword(password);
    }

    /**
     * Gets the password.
     *
     * @return The password to authenticate with.
     */
    public String getPassword() {
        return properties.getPassword();
    }

    /**
     * Sets the database name.
     *
     * @param database The name of the database.
     */
    public void setDatabase(final String database) {
        properties.setDatabase(database);
    }

    /**
     * Gets the database name.
     *
     * @return The database to connect to.
     */
    public String getDatabase() {
        return properties.getDatabase();
    }

    /**
     * Sets the host name.
     *
     * @param hostname The hostname to connect to.
     */
    public void setHostname(final String hostname) {
        properties.setHostname(hostname);
    }

    /**
     * Gets the hostname.
     *
     * @return The host name to connect to.
     */
    public String getHostname() {
        return properties.getHostname();
    }

    /**
     * Sets the read preference when connecting as a replica set.
     *
     * @param readPreference The name of the read preference.
     */
    public void setReadPreference(final DocumentDbReadPreference readPreference) {
        properties.setReadPreference(readPreference.getName());
    }

    /**
     * Gets the read preference.
     *
     * @return The read preference.
     */
    public DocumentDbReadPreference getReadPreference() {
        return properties.getReadPreference();
    }

    /**
     * Sets the application name.
     *
     * @param applicationName The name of the application
     */
    public void setApplicationName(final String applicationName) {
        properties.setApplicationName(applicationName);
    }

    /**
     * Gets the application name.
     *
     * @return The name of the application.
     */
    public String getApplicationName() {
        return properties.getApplicationName();
    }

    /**
     * Sets the replica set name.
     *
     * @param replicaSet The name of the replica set to connect to.
     */
    public void setReplicaSet(final String replicaSet) {
        if (replicaSet != null) {
            if (!replicaSet.equals(DocumentDbConnectionProperty.REPLICA_SET.getDefaultValue())) {
                // Parameterized logging avoids the String.format cost when WARN is disabled.
                LOGGER.warn("DocumentDB may not support replica set '{}'.", replicaSet);
            }
            properties.setReplicaSet(replicaSet);
        }
    }

    /**
     * Gets the replica set name.
     *
     * @return The name of the replica set.
     */
    public String getReplicaSet() {
        return properties.getReplicaSet();
    }

    /**
     * Sets the TLS enabled flag.
     *
     * @param tlsEnabled {@code true} if TLS/SSL is enabled; {@code false} otherwise.
     */
    public void setTlsEnabled(final boolean tlsEnabled) {
        properties.setTlsEnabled(String.valueOf(tlsEnabled));
    }

    /**
     * Gets the TLS enabled flag.
     *
     * @return {@code true} if TLS/SSL is enabled; {@code false} otherwise.
     */
    public boolean getTlsEnabled() {
        return properties.getTlsEnabled();
    }

    /**
     * Sets allow invalid hostnames flag for TLS connections.
     *
     * @param allowInvalidHostnames Whether invalid hostnames are allowed when connecting with
     *                              TLS/SSL.
     */
    public void setTlsAllowInvalidHostnames(final boolean allowInvalidHostnames) {
        properties.setTlsAllowInvalidHostnames(String.valueOf(allowInvalidHostnames));
    }

    /**
     * Gets the allow invalid hostnames flag for TLS connections.
     *
     * @return {@code true} if invalid host names are allowed; {@code false} otherwise.
     */
    public boolean getTlsAllowInvalidHosts() {
        return properties.getTlsAllowInvalidHostnames();
    }

    /**
     * Sets retry reads flag.
     *
     * @param retryReadsEnabled Whether the driver should retry read operations if they fail due to
     *                          a network error
     */
    public void setRetryReadsEnabled(final boolean retryReadsEnabled) {
        properties.setRetryReadsEnabled(String.valueOf(retryReadsEnabled));
    }

    /**
     * Gets the retry reads flag.
     *
     * @return {@code true} if the driver should retry read operations if they fail due to a network
     *         error; {@code false} otherwise.
     */
    public boolean getRetryReadsEnabled() {
        return properties.getRetryReadsEnabled();
    }

    /**
     * Throws a SQLException reporting the invalid (negative) timeout value.
     *
     * @param timeout the invalid timeout value to report.
     * @throws SQLException always.
     */
    private void throwInvalidTimeoutException(final long timeout) throws SQLException {
        throw SqlError.createSQLException(
                LOGGER,
                SqlState.DATA_EXCEPTION,
                SqlError.INVALID_TIMEOUT,
                Long.valueOf(timeout)
        );
    }

    @VisibleForTesting
    void validateRequiredProperties() throws SQLException {
        properties.validateRequiredProperties();
    }
}
4,567
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbQueryExecutor.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.mongodb.MongoException; import com.mongodb.client.AggregateIterable; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; import org.bson.Document; import org.bson.conversions.Bson; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.documentdb.jdbc.common.utilities.JdbcColumnMetaData; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import software.amazon.documentdb.jdbc.common.utilities.SqlState; import software.amazon.documentdb.jdbc.query.DocumentDbMqlQueryContext; import software.amazon.documentdb.jdbc.query.DocumentDbQueryMappingService; import java.sql.SQLException; import java.time.Instant; import java.util.List; import java.util.UUID; import java.util.concurrent.TimeUnit; /** * DocumentDb implementation of QueryExecution. 
 */
public class DocumentDbQueryExecutor {
    // Mongo server error code returned when an in-flight operation is killed (killOp).
    private static final int OPERATION_CANCELLED_CODE = 11601;
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbQueryExecutor.class);
    // Guards all transitions of queryState/queryId between the executing thread and a
    // cancelling thread.
    private final Object queryStateLock = new Object();
    private final java.sql.Statement statement;
    private final DocumentDbConnectionProperties connectionProperties;
    private final DocumentDbQueryMappingService queryMapper;
    private int fetchSize;
    private int queryTimeout;
    private DocumentDbAllowDiskUseOption allowDiskUse;
    // Identifier of the currently running query; null when no query is in flight.
    private String queryId = null;
    private QueryState queryState = QueryState.NOT_STARTED;

    // Lifecycle of a query execution as seen by cancelQuery/executeQuery.
    private enum QueryState {
        NOT_STARTED,
        IN_PROGRESS,
        CANCELED
    }

    /**
     * DocumentDbQueryExecutor constructor.
     */
    DocumentDbQueryExecutor(
            final java.sql.Statement statement,
            final DocumentDbConnectionProperties connectionProperties,
            final DocumentDbQueryMappingService queryMapper,
            final int queryTimeoutSecs,
            final int fetchSize) {
        this.statement = statement;
        this.connectionProperties = connectionProperties;
        this.queryMapper = queryMapper;
        this.fetchSize = fetchSize;
        this.queryTimeout = queryTimeoutSecs;
        this.allowDiskUse = connectionProperties.getAllowDiskUseOption();
    }

    /**
     * This function wraps query cancellation and ensures query state is kept consistent.
     *
     * @throws SQLException If query cancellation fails or cannot be executed.
     * @param isClosing An indicator for whether the statement is closing.
     */
    protected void cancelQuery(final boolean isClosing) throws SQLException {
        synchronized (queryStateLock) {
            // Already cancelled: nothing to do.
            if (queryState.equals(QueryState.CANCELED)) {
                return;
            } else if (queryState.equals(QueryState.NOT_STARTED)) {
                // No query in flight: silently succeed when the statement is closing,
                // otherwise report that there is nothing to cancel.
                if (isClosing) {
                    return;
                }
                throw SqlError.createSQLException(
                        LOGGER,
                        SqlState.OPERATION_CANCELED,
                        SqlError.QUERY_NOT_STARTED_OR_COMPLETE);
            }
            // State transition happens under the lock only after the server-side kill succeeds.
            performCancel();
            queryState = QueryState.CANCELED;
        }
    }

    /**
     * This function wraps query execution and ensures query state is kept consistent.
     *
     * @param query Query to execute.
     * @return ResultSet Object.
     * @throws SQLException if query execution fails, or it was cancelled.
     */
    public java.sql.ResultSet executeQuery(final String query) throws SQLException {
        synchronized (queryStateLock) {
            // Only one query per executor may be in flight at a time.
            if (queryState.equals(QueryState.IN_PROGRESS)) {
                throw SqlError.createSQLException(
                        LOGGER,
                        SqlState.OPERATION_CANCELED,
                        SqlError.QUERY_IN_PROGRESS);
            }
            queryState = QueryState.IN_PROGRESS;
            // Fresh id per execution; used by performCancel to locate the server-side operation.
            queryId = UUID.randomUUID().toString();
        }
        try {
            final java.sql.ResultSet resultSet = runQuery(query);
            synchronized (queryStateLock) {
                // A cancel may have raced with a successful execution; honor the cancel.
                if (queryState.equals(QueryState.CANCELED)) {
                    resetQueryState();
                    throw SqlError.createSQLException(
                            LOGGER,
                            SqlState.OPERATION_CANCELED,
                            SqlError.QUERY_CANCELED);
                }
            }
            return resultSet;
        } catch (final SQLException e) {
            throw e;
        } catch (final Exception e) {
            synchronized (queryStateLock) {
                // A MongoException with the "operation killed" code while in CANCELED state
                // means the failure was caused by our own cancellation.
                if (e instanceof MongoException
                        && ((MongoException) e).getCode() == OPERATION_CANCELLED_CODE
                        && queryState.equals(QueryState.CANCELED)) {
                    throw SqlError.createSQLException(
                            LOGGER,
                            SqlState.OPERATION_CANCELED,
                            SqlError.QUERY_CANCELED);
                } else {
                    throw SqlError.createSQLException(
                            LOGGER,
                            SqlState.OPERATION_CANCELED,
                            SqlError.QUERY_FAILED,
                            e);
                }
            }
        } finally {
            // Runs on success and failure alike: the executor is ready for the next query
            // as soon as executeQuery returns.
            resetQueryState();
        }
    }

    /**
     * Function to execute query.
     * @param sql Query to execute.
     * @return java.sql.ResultSet object returned from query execution.
     * @throws SQLException throws a SQLException
     */
    @VisibleForTesting
    protected java.sql.ResultSet runQuery(final String sql) throws SQLException {
        // Phase 1: translate SQL into a MongoDB aggregation pipeline.
        final Instant beginTranslation = Instant.now();
        LOGGER.info("Query {}: Beginning translation of query.", queryId);
        LOGGER.debug("Query {}: {}", queryId, sql);
        final long maxRows = statement.getLargeMaxRows();
        final DocumentDbMqlQueryContext queryContext = queryMapper.get(sql, maxRows);
        LOGGER.info("Query {}: Took {} ms to translate query.",
                queryId,
                Instant.now().toEpochMilli() - beginTranslation.toEpochMilli());
        if (!(statement.getConnection() instanceof DocumentDbConnection)) {
            throw new SQLException("Unexpected operation state.");
        }
        // Phase 2: run the aggregation against the target collection.
        final Instant beginExecution = Instant.now();
        final DocumentDbConnection connection = (DocumentDbConnection) statement.getConnection();
        final DocumentDbConnectionProperties properties = connection.getConnectionProperties();
        final MongoClient client = connection.getMongoClient();
        final MongoDatabase database = client.getDatabase(properties.getDatabase());
        final MongoCollection<Document> collection = database
                .getCollection(queryContext.getCollectionName());
        final List<Bson> aggregateOperations = queryContext.getAggregateOperations();
        AggregateIterable<Document> iterable = collection.aggregate(aggregateOperations);
        // NOTE(review): performCancel matches the server operation on
        // 'command.comment' == queryId, but no .comment(queryId) call is visible here —
        // presumably the comment is attached inside the mapped pipeline; confirm.
        if (getQueryTimeout() > 0) {
            iterable = iterable.maxTime(getQueryTimeout(), TimeUnit.SECONDS);
        }
        if (getFetchSize() > 0) {
            iterable = iterable.batchSize(getFetchSize());
        }
        if (getAllowDiskUse() == DocumentDbAllowDiskUseOption.ENABLE) {
            iterable = iterable.allowDiskUse(true);
        } else if (getAllowDiskUse() == DocumentDbAllowDiskUseOption.DISABLE) {
            iterable = iterable.allowDiskUse(false);
        }
        // AllowDiskUseOption.DEFAULT leaves the server default in place (no call made).
        final ImmutableList<JdbcColumnMetaData> columnMetaData = ImmutableList
                .copyOf(queryContext.getColumnMetaData());
        // iterator() triggers execution and fetches the first batch.
        final MongoCursor<Document> iterator = iterable.iterator();
        LOGGER.info("Query {}: Took {} ms to execute query and retrieve first batch of results.",
                queryId,
                Instant.now().toEpochMilli() - beginExecution.toEpochMilli());
        LOGGER.debug("Query {}: Executed on collection {} with following pipeline operations: {}",
                queryId,
                queryContext.getCollectionName(),
                queryContext.getAggregateOperations().toString());
        return new DocumentDbResultSet(
                this.statement,
                iterator,
                columnMetaData,
                queryContext.getPaths());
    }

    // Returns the executor to its idle state; callers hold queryStateLock or are in
    // executeQuery's finally block.
    private void resetQueryState() {
        queryState = QueryState.NOT_STARTED;
        queryId = null;
    }

    // Kills the in-flight server-side operation identified by queryId using a dedicated
    // admin connection (the statement's own connection is busy running the query).
    private void performCancel() throws SQLException {
        try (MongoClient client = connectionProperties.createMongoClient()) {
            final MongoDatabase database = client.getDatabase("admin");
            // Find the opId to kill using the queryId.
            final Document currentOp = database.runCommand(
                    new Document("currentOp", 1)
                            .append("$ownOps", true)
                            .append("command.comment", queryId));
            if (!(currentOp.get("inprog") instanceof List)) {
                throw new SQLException("Unexpected operation state.");
            }
            final List<?> ops = (List<?>) currentOp.get("inprog");
            // If there are no results, the aggregation has not been executed yet or is complete.
            if (ops.isEmpty()) {
                throw SqlError.createSQLException(
                        LOGGER,
                        SqlState.OPERATION_CANCELED,
                        SqlError.QUERY_NOT_STARTED_OR_COMPLETE);
            }
            // If there is more than 1 result then more than 1 operations have been given same id,
            // and we do not know which to cancel.
            if (ops.size() != 1) {
                throw SqlError.createSQLException(
                        LOGGER,
                        SqlState.OPERATION_CANCELED,
                        SqlError.QUERY_CANNOT_BE_CANCELED,
                        "More than one running operation matched the query ID.");
            }
            if (!(ops.get(0) instanceof Document)) {
                throw new SQLException("Unexpected operation state.");
            }
            final Object opId = ((Document)ops.get(0)).get("opid");
            if (opId == null) {
                throw new SQLException("Unexpected operation state.");
            }
            // Cancel the aggregation using killOp.
            final Document killOp = database.runCommand(new Document("killOp", 1)
                    .append("op", opId));
            // Throw error with info if command did not succeed.
            if (!killOp.get("ok").equals(1.0)) {
                throw SqlError.createSQLException(
                        LOGGER,
                        SqlState.OPERATION_CANCELED,
                        SqlError.QUERY_CANNOT_BE_CANCELED,
                        killOp.get("info"));
            }
        } catch (SQLException e) {
            throw e;
        } catch (Exception e) {
            // Wrap driver/network failures; the original exception is preserved as the cause.
            throw SqlError.createSQLException(
                    LOGGER,
                    SqlState.OPERATION_CANCELED,
                    SqlError.QUERY_CANNOT_BE_CANCELED,
                    e);
        }
    }

    protected String getQueryId() {
        return queryId;
    }

    protected int getQueryTimeout() {
        return queryTimeout;
    }

    protected void setQueryTimeout(final int queryTimeout) {
        this.queryTimeout = queryTimeout;
    }

    protected int getFetchSize() {
        return fetchSize;
    }

    protected void setFetchSize(final int fetchSize) {
        this.fetchSize = fetchSize;
    }

    protected DocumentDbAllowDiskUseOption getAllowDiskUse() {
        return allowDiskUse;
    }

    protected void setAllowDiskUse(final DocumentDbAllowDiskUseOption allowDiskUse) {
        this.allowDiskUse = allowDiskUse;
    }
}
4,568
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbDatabaseMetaDataResultSets.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.google.common.collect.ImmutableList; import software.amazon.documentdb.jdbc.common.utilities.JdbcColumnMetaData; import software.amazon.documentdb.jdbc.common.utilities.JdbcType; import java.sql.ResultSetMetaData; import java.sql.Types; /** * Contains static methods to get DatabaseMetadata column metadata instances. */ class DocumentDbDatabaseMetaDataResultSets { private static ImmutableList<JdbcColumnMetaData> proceduresColumnMetaData; private static ImmutableList<JdbcColumnMetaData> tablesColumnMetaData; private static ImmutableList<JdbcColumnMetaData> schemasColumnMetaData; private static ImmutableList<JdbcColumnMetaData> catalogsColumnMetaData; private static ImmutableList<JdbcColumnMetaData> tableTypesColumnMetaData; private static ImmutableList<JdbcColumnMetaData> columnsColumnMetaData; private static ImmutableList<JdbcColumnMetaData> columnPrivilegesColumnMetaData; private static ImmutableList<JdbcColumnMetaData> attributesColumnMetaData; private static ImmutableList<JdbcColumnMetaData> primaryKeysColumnMetaData; private static ImmutableList<JdbcColumnMetaData> importedKeysColumnMetaData; private static ImmutableList<JdbcColumnMetaData> typeInfoColumnMetaData; static ImmutableList<JdbcColumnMetaData> buildProceduresColumnMetaData( final String schemaName) { if (proceduresColumnMetaData == null) { // 1. 
PROCEDURE_CAT String => procedure catalog (may be null) // 2. PROCEDURE_SCHEM String => procedure schema (may be null) // 3. PROCEDURE_NAME String => procedure name // 4. reserved for future use // 5. reserved for future use // 6. reserved for future use // 7. REMARKS String => explanatory comment on the procedure // 8. PROCEDURE_TYPE short => kind of procedure: // procedureResultUnknown - Cannot determine if a return value will be returned // procedureNoResult - Does not return a return value // procedureReturnsResult - Returns a return value // 9. SPECIFIC_NAME String => The name which uniquely identifies this procedure within its schema. int ordinal = 0; proceduresColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "PROCEDURE_CAT", //label, "PROCEDURE_CAT", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "PROCEDURE_SCHEM", //label, "PROCEDURE_SCHEM", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "PROCEDURE_NAME", //label, "PROCEDURE_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "FUTURE_USE1", 
//label, "FUTURE_USE1", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "FUTURE_USE2", //label, "FUTURE_USE2", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "FUTURE_USE3", //label, "FUTURE_USE3", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "REMARKS", //label, "REMARKS", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "PROCEDURE_TYPE", //label, "PROCEDURE_TYPE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.SMALLINT, //type.id, JdbcType.SMALLINT.name(), //type.name, short.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal, // not incremented true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "SPECIFIC_NAME", //label, "SPECIFIC_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, 
String.class.getName()) //columnClassName ) .build(); } return proceduresColumnMetaData; } static ImmutableList<JdbcColumnMetaData> buildTablesColumnMetaData( final String schemaName) { if (tablesColumnMetaData == null) { // 1. TABLE_CAT String => table catalog (may be null) // 2. TABLE_SCHEM String => table schema (may be null) // 3. TABLE_NAME String => table name // 4. TABLE_TYPE String => table type. Typical types are "TABLE", "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL TEMPORARY", "ALIAS", "SYNONYM". // 5. REMARKS String => explanatory comment on the table // 6. TYPE_CAT String => the types catalog (may be null) // 7. TYPE_SCHEM String => the types schema (may be null) // 8. TYPE_NAME String => type name (may be null) // 9. SELF_REFERENCING_COL_NAME String => name of the designated "identifier" column of a typed table (may be null) // 10. REF_GENERATION String => specifies how values in SELF_REFERENCING_COL_NAME are created. Values are "SYSTEM", "USER", "DERIVED". (may be null) int ordinal = 0; tablesColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TABLE_CAT", //label, "TABLE_CAT", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TABLE_SCHEM", //label, "TABLE_SCHEM", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "TABLE_NAME", //label, "TABLE_NAME", //columnName, 
schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "TABLE_TYPE", //label, "TABLE_TYPE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "REMARKS", //label, "REMARKS", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TYPE_CAT", //label, "TYPE_CAT", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TYPE_SCHEM", //label, "TYPE_SCHEM", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TYPE_NAME", //label, "TYPE_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, 
//caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "SELF_REFERENCING_COL_NAME", //label, "SELF_REFERENCING_COL_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal, // not incremented false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "REF_GENERATION", //label, "REF_GENERATION", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .build(); } return tablesColumnMetaData; } static ImmutableList<JdbcColumnMetaData> buildSchemasColumnMetaData( final String schemaName) { if (schemasColumnMetaData == null) { int ordinal = 0; schemasColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "TABLE_SCHEM", //label, "TABLE_SCHEM", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal, // not incremented true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TABLE_CATALOG", //label, "TABLE_CATALOG", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .build(); } return schemasColumnMetaData; } static ImmutableList<JdbcColumnMetaData> buildCatalogsColumnMetaData( final String schemaName) { if (catalogsColumnMetaData == null) { // 1. 
TABLE_CAT String => catalog name final int ordinal = 0; catalogsColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal, // not incremented true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "TABLE_CAT", //label, "TABLE_CAT", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .build(); } return catalogsColumnMetaData; } static ImmutableList<JdbcColumnMetaData> buildTableTypesColumnMetaData( final String schemaName) { if (tableTypesColumnMetaData == null) { // 1. TABLE_TYPE String => table type. Typical types are "TABLE", "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY", "LOCAL TEMPORARY", "ALIAS", "SYNONYM". final int ordinal = 0; tableTypesColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal, // not incremented true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "TABLE_TYPE", //label, "TABLE_TYPE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .build(); } return tableTypesColumnMetaData; } static ImmutableList<JdbcColumnMetaData> buildColumnsColumnMetaData( final String schemaName) { if (columnsColumnMetaData == null) { // 1. TABLE_CAT String => table catalog (may be null) // 2. TABLE_SCHEM String => table schema (may be null) // 3. TABLE_NAME String => table name // 4. COLUMN_NAME String => column name // 5. DATA_TYPE int => SQL type from java.sql.Types // 6. TYPE_NAME String => Data source dependent type name, for a UDT the type name is fully qualified // 7. COLUMN_SIZE int => column size. // 8. BUFFER_LENGTH is not used. // 9. DECIMAL_DIGITS int => the number of fractional digits. 
Null is returned for data types where DECIMAL_DIGITS is not applicable. // 10. NUM_PREC_RADIX int => Radix (typically either 10 or 2) // 11. NULLABLE int => is NULL allowed. // columnNoNulls - might not allow NULL values // columnNullable - definitely allows NULL values // columnNullableUnknown - nullability unknown // 12. REMARKS String => comment describing column (may be null) // 13. COLUMN_DEF String => default value for the column, which should be interpreted as a string when the value is enclosed in single quotes (may be null) // 14. SQL_DATA_TYPE int => unused // 15. SQL_DATETIME_SUB int => unused // 16. CHAR_OCTET_LENGTH int => for char types the maximum number of bytes in the column // 17. ORDINAL_POSITION int => index of column in table (starting at 1) // 18. IS_NULLABLE String => ISO rules are used to determine the nullability for a column. // YES --- if the column can include NULLs // NO --- if the column cannot include NULLs // empty string --- if the nullability for the column is unknown // 19. SCOPE_CATALOG String => catalog of table that is the scope of a reference attribute (null if DATA_TYPE isn't REF) // 20. SCOPE_SCHEMA String => schema of table that is the scope of a reference attribute (null if the DATA_TYPE isn't REF) // 21. SCOPE_TABLE String => table name that this the scope of a reference attribute (null if the DATA_TYPE isn't REF) // 22. SOURCE_DATA_TYPE short => source type of a distinct type or user-generated Ref type, SQL type from java.sql.Types (null if DATA_TYPE isn't DISTINCT or user-generated REF) // 23. IS_AUTOINCREMENT String => Indicates whether this column is auto incremented // YES --- if the column is auto incremented // NO --- if the column is not auto incremented // empty string --- if it cannot be determined whether the column is auto incremented // 24. 
IS_GENERATEDCOLUMN String => Indicates whether this is a generated column // YES --- if this a generated column // NO --- if this not a generated column // empty string --- if it cannot be determined whether this is a generated column int ordinal = 0; columnsColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TABLE_CAT", //label, "TABLE_CAT", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TABLE_SCHEM", //label, "TABLE_SCHEM", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 120, //displaySize, "TABLE_NAME", //label, "TABLE_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 255, //displaySize, "COLUMN_NAME", //label, "COLUMN_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 12, //displaySize, "DATA_TYPE", //label, "DATA_TYPE", //columnName, schemaName, //schemaName, 0, //precision, 0, 
//scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "TYPE_NAME", //label, "TYPE_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 12, //displaySize, "COLUMN_SIZE", //label, "COLUMN_SIZE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "BUFFER_LENGTH", //label, "BUFFER_LENGTH", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "DECIMAL_DIGITS", //label, "DECIMAL_DIGITS", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "NUM_PREC_RADIX", //label, "NUM_PREC_RADIX", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, 
ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 2, //displaySize, "NULLABLE", //label, "NULLABLE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 255, //displaySize, "REMARKS", //label, "REMARKS", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 255, //displaySize, "COLUMN_DEF", //label, "COLUMN_DEF", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "SQL_DATA_TYPE", //label, "SQL_DATA_TYPE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "SQL_DATETIME_SUB", //label, "SQL_DATETIME_SUB", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "CHAR_OCTET_LENGTH", //label, "CHAR_OCTET_LENGTH", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, 
Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 12, //displaySize, "ORDINAL_POSITION", //label, "ORDINAL_POSITION", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 12, //displaySize, "IS_NULLABLE", //label, "IS_NULLABLE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "SCOPE_CATALOG", //label, "SCOPE_CATALOG", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "SCOPE_SCHEMA", //label, "SCOPE_SCHEMA", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 120, //displaySize, "SCOPE_TABLE", //label, "SCOPE_TABLE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, 
ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "SOURCE_DATA_TYPE", //label, "SOURCE_DATA_TYPE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.SMALLINT, //type.id, JdbcType.SMALLINT.name(), //type.name, short.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 3, //displaySize, "IS_AUTOINCREMENT", //label, "IS_AUTOINCREMENT", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal, // not incremented true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 3, //displaySize, "IS_GENERATEDCOLUMN", //label, "IS_GENERATEDCOLUMN", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .build(); } return columnsColumnMetaData; } static ImmutableList<JdbcColumnMetaData> buildColumnPrivilegesColumnMetaData( final String schemaName) { if (columnPrivilegesColumnMetaData == null) { // 1. TABLE_CAT String => table catalog (may be null) // 2. TABLE_SCHEM String => table schema (may be null) // 3. TABLE_NAME String => table name // 4. COLUMN_NAME String => column name // 5. GRANTOR String => grantor of access (may be null) // 6. GRANTEE String => grantee of access // 7. PRIVILEGE String => name of access (SELECT, INSERT, UPDATE, REFRENCES, ...) // 8. 
IS_GRANTABLE String => "YES" if grantee is permitted to grant to others; "NO" if not; null if unknown int ordinal = 0; columnPrivilegesColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TABLE_CAT", //label, "TABLE_CAT", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TABLE_SCHEM", //label, "TABLE_SCHEM", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "TABLE_NAME", //label, "TABLE_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "COLUMN_NAME", //label, "COLUMN_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "GRANTOR", //label, "GRANTOR", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new 
JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "GRANTEE", //label, "GRANTEE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "PRIVILEGE", //label, "PRIVILEGE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal, // not incremented true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "IS_GRANTABLE", //label, "IS_GRANTABLE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .build(); } return columnPrivilegesColumnMetaData; } static ImmutableList<JdbcColumnMetaData> buildAttributesColumnMetaData( final String schemaName) { if (attributesColumnMetaData == null) { // 1. TYPE_CAT String => type catalog (may be null) // 2. TYPE_SCHEM String => type schema (may be null) // 3. TYPE_NAME String => type name // 4. ATTR_NAME String => attribute name // 5. DATA_TYPE int => attribute type SQL type from java.sql.Types // 6. ATTR_TYPE_NAME String => Data source dependent type name. For a UDT, the type name is fully qualified. For a REF, the type name is fully qualified and represents the target type of the reference type. // 7. ATTR_SIZE int => column size. For char or date types this is the maximum number of characters; for numeric or decimal types this is precision. // 8. DECIMAL_DIGITS int => the number of fractional digits. 
Null is returned for data types where DECIMAL_DIGITS is not applicable. // 9. NUM_PREC_RADIX int => Radix (typically either 10 or 2) // 10. NULLABLE int => whether NULL is allowed // attributeNoNulls - might not allow NULL values // attributeNullable - definitely allows NULL values // attributeNullableUnknown - nullability unknown // 11. REMARKS String => comment describing column (may be null) // 12. ATTR_DEF String => default value (may be null) // 13. SQL_DATA_TYPE int => unused // 14. SQL_DATETIME_SUB int => unused // 15. CHAR_OCTET_LENGTH int => for char types the maximum number of bytes in the column // 16. ORDINAL_POSITION int => index of the attribute in the UDT (starting at 1) // 17. IS_NULLABLE String => ISO rules are used to determine the nullability for a attribute. // YES --- if the attribute can include NULLs // NO --- if the attribute cannot include NULLs // empty string --- if the nullability for the attribute is unknown // 18. SCOPE_CATALOG String => catalog of table that is the scope of a reference attribute (null if DATA_TYPE isn't REF) // 19. SCOPE_SCHEMA String => schema of table that is the scope of a reference attribute (null if DATA_TYPE isn't REF) // 20. SCOPE_TABLE String => table name that is the scope of a reference attribute (null if the DATA_TYPE isn't REF) // 21. 
SOURCE_DATA_TYPE short => source type of a distinct type or user-generated Ref type,SQL type from java.sql.Types (null if DATA_TYPE isn't DISTINCT or user-generated REF) int ordinal = 0; attributesColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TYPE_CAT", //label, "TYPE_CAT", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TYPE_SCHEM", //label, "TYPE_SCHEM", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 120, //displaySize, "TYPE_NAME", //label, "TYPE_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 255, //displaySize, "ATTR_NAME", //label, "ATTR_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 12, //displaySize, "DATA_TYPE", //label, "DATA_TYPE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, 
int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 64, //displaySize, "ATTR_TYPE_NAME", //label, "ATTR_TYPE_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 12, //displaySize, "ATTR_SIZE", //label, "ATTR_SIZE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "DECIMAL_DIGITS", //label, "DECIMAL_DIGITS", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "NUM_PREC_RADIX", //label, "NUM_PREC_RADIX", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 2, //displaySize, "NULLABLE", //label, "NULLABLE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 255, //displaySize, "REMARKS", //label, 
"REMARKS", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 255, //displaySize, "ATTR_DEF", //label, "ATTR_DEF", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "SQL_DATA_TYPE", //label, "SQL_DATA_TYPE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "SQL_DATETIME_SUB", //label, "SQL_DATETIME_SUB", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "CHAR_OCTET_LENGTH", //label, "CHAR_OCTET_LENGTH", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 12, //displaySize, "ORDINAL_POSITION", //label, "ORDINAL_POSITION", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) 
//columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 12, //displaySize, "IS_NULLABLE", //label, "IS_NULLABLE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "SCOPE_CATALOG", //label, "SCOPE_CATALOG", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "SCOPE_SCHEMA", //label, "SCOPE_SCHEMA", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 120, //displaySize, "SCOPE_TABLE", //label, "SCOPE_TABLE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal, // not incremented true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 12, //displaySize, "SOURCE_DATA_TYPE", //label, "SOURCE_DATA_TYPE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.SMALLINT, //type.id, JdbcType.SMALLINT.name(), //type.name, short.class.getName()) //columnClassName ) .build(); } return attributesColumnMetaData; } static ImmutableList<JdbcColumnMetaData> buildPrimaryKeysColumnMetaData( final String schemaName) 
{ if (primaryKeysColumnMetaData == null) { // 1. TABLE_CAT String => table catalog (may be null) // 2. TABLE_SCHEM String => table schema (may be null) // 3. TABLE_NAME String => table name // 4. COLUMN_NAME String => column name // 5. KEY_SEQ short => sequence number within primary key( a value of 1 represents the first column of the primary key, a value of 2 would represent the second column within the primary key). // 6. PK_NAME String => primary key name (may be null) int ordinal = 0; primaryKeysColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal++, false, //autoIncrement, true, //caseSensitive, true, //searchable, false, //currency, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TABLE_CAT", //label, "TABLE_CAT", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, "", //tableName, "", //catalogName, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, true, //readOnly, false, //writable, false, //definitelyWritable, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //autoIncrement, true, //caseSensitive, true, //searchable, false, //currency, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "TABLE_SCHEM", //label, "TABLE_SCHEM", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, "", //tableName, "", //catalogName, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, true, //readOnly, false, //writable, false, //definitelyWritable, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //autoIncrement, true, //caseSensitive, true, //searchable, false, //currency, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 120, //displaySize, "TABLE_NAME", //label, "TABLE_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, "", //tableName, "", //catalogName, Types.VARCHAR, //type.id, 
JdbcType.VARCHAR.name(), //type.name, true, //readOnly, false, //writable, false, //definitelyWritable, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //autoIncrement, true, //caseSensitive, true, //searchable, false, //currency, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 255, //displaySize, "COLUMN_NAME", //label, "COLUMN_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, "", //tableName, "", //catalogName, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, true, //readOnly, false, //writable, false, //definitelyWritable, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //autoIncrement, false, //caseSensitive, true, //searchable, false, //currency, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 12, //displaySize, "KEY_SEQ", //label, "KEY_SEQ", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, "", //tableName, "", //catalogName, Types.SMALLINT, //type.id, JdbcType.SMALLINT.name(), //type.name, true, //readOnly, false, //writable, false, //definitelyWritable, short.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal, // not incremented false, //autoIncrement, false, //caseSensitive, true, //searchable, false, //currency, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "PK_NAME", //label, "PK_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, "", //tableName, "", //catalogName, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, true, //readOnly, false, //writable, false, //definitelyWritable, String.class.getName()) //columnClassName ) .build(); } return primaryKeysColumnMetaData; } static ImmutableList<JdbcColumnMetaData> buildImportedKeysColumnMetaData( final String schemaName) { if (importedKeysColumnMetaData == null) { // 1. 
PKTABLE_CAT String => primary key table catalog being imported (may be null) // 2. PKTABLE_SCHEM String => primary key table schema being imported (may be null) // 3. PKTABLE_NAME String => primary key table name being imported // 4. PKCOLUMN_NAME String => primary key column name being imported // 5. FKTABLE_CAT String => foreign key table catalog (may be null) // 6. FKTABLE_SCHEM String => foreign key table schema (may be null) // 7. FKTABLE_NAME String => foreign key table name // 8. FKCOLUMN_NAME String => foreign key column name // 9. KEY_SEQ short => sequence number within a foreign key // (a value of 1 represents the first column of the foreign key, a value of 2 // would represent the second column within the foreign key). // 10. UPDATE_RULE short => What happens to a foreign key when the primary key is updated: // importedNoAction - do not allow update of primary key if it has been imported // importedKeyCascade - change imported key to agree with primary key update // importedKeySetNull - change imported key to NULL if its primary key has been updated // importedKeySetDefault - change imported key to default values if its primary key has been updated // importedKeyRestrict - same as importedKeyNoAction (for ODBC 2.x compatibility) // 11. DELETE_RULE short => What happens to the foreign key when primary is deleted. // importedKeyNoAction - do not allow delete of primary key if it has been imported // importedKeyCascade - delete rows that import a deleted key // importedKeySetNull - change imported key to NULL if its primary key has been deleted // importedKeyRestrict - same as importedKeyNoAction (for ODBC 2.x compatibility) // importedKeySetDefault - change imported key to default if its primary key has been deleted // 12. FK_NAME String => foreign key name (may be null) // 13. PK_NAME String => primary key name (may be null) // 14. 
DEFERRABILITY short => can the evaluation of foreign key constraints be deferred until commit // importedKeyInitiallyDeferred - see SQL92 for definition // importedKeyInitiallyImmediate - see SQL92 for definition // importedKeyNotDeferrable - see SQL92 for definition int ordinal = 0; importedKeysColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "PKTABLE_CAT", //label, "PKTABLE_CAT", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "PKTABLE_SCHEM", //label, "PKTABLE_SCHEM", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 120, //displaySize, "PKTABLE_NAME", //label, "PKTABLE_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 255, //displaySize, "PKCOLUMN_NAME", //label, "PKCOLUMN_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "FKTABLE_CAT", //label, "FKTABLE_CAT", 
//columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "FKTABLE_SCHEM", //label, "FKTABLE_SCHEM", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 120, //displaySize, "FKTABLE_NAME", //label, "FKTABLE_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, true, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, false, //signed, 255, //displaySize, "FKCOLUMN_NAME", //label, "FKCOLUMN_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 12, //displaySize, "KEY_SEQ", //label, "KEY_SEQ", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.SMALLINT, //type.id, JdbcType.SMALLINT.name(), //type.name, short.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 12, //displaySize, "UPDATE_RULE", //label, "UPDATE_RULE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.SMALLINT, //type.id, JdbcType.SMALLINT.name(), //type.name, short.class.getName()) //columnClassName ) .add(new 
JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 12, //displaySize, "DELETE_RULE", //label, "DELETE_RULE", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.SMALLINT, //type.id, JdbcType.SMALLINT.name(), //type.name, short.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "FK_NAME", //label, "FK_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, false, //signed, 64, //displaySize, "PK_NAME", //label, "PK_NAME", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal, // not incremented false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 12, //displaySize, "DEFERRABILITY", //label, "DEFERRABILITY", //columnName, schemaName, //schemaName, 0, //precision, 0, //scale, Types.SMALLINT, //type.id, JdbcType.SMALLINT.name(), //type.name, short.class.getName()) //columnClassName ) .build(); } return importedKeysColumnMetaData; } static ImmutableList<JdbcColumnMetaData> buildTypeInfoColumnMetaData() { /** * Retrieves a description of all the data types supported by this database. They are ordered by DATA_TYPE and then by how closely the data type maps to the corresponding JDBC SQL type. * If the database supports SQL distinct types, then getTypeInfo() will return a single row with a TYPE_NAME of DISTINCT and a DATA_TYPE of Types.DISTINCT. 
If the database supports SQL structured types, then getTypeInfo() will return a single row with a TYPE_NAME of STRUCT and a DATA_TYPE of Types.STRUCT. * * If SQL distinct or structured types are supported, then information on the individual types may be obtained from the getUDTs() method. * * Each type description has the following columns: * * TYPE_NAME String => Type name * DATA_TYPE int => SQL data type from java.sql.Types * PRECISION int => maximum precision * LITERAL_PREFIX String => prefix used to quote a literal (may be null) * LITERAL_SUFFIX String => suffix used to quote a literal (may be null) * CREATE_PARAMS String => parameters used in creating the type (may be null) * NULLABLE short => can you use NULL for this type. * typeNoNulls - does not allow NULL values * typeNullable - allows NULL values * typeNullableUnknown - nullability unknown * CASE_SENSITIVE boolean=> is it case sensitive. * SEARCHABLE short => can you use "WHERE" based on this type: * typePredNone - No support * typePredChar - Only supported with WHERE .. LIKE * typePredBasic - Supported except for WHERE .. LIKE * typeSearchable - Supported for all WHERE .. * UNSIGNED_ATTRIBUTE boolean => is it unsigned. * FIXED_PREC_SCALE boolean => can it be a money value. * AUTO_INCREMENT boolean => can it be used for an auto-increment value. * LOCAL_TYPE_NAME String => localized version of type name (may be null) * MINIMUM_SCALE short => minimum scale supported * MAXIMUM_SCALE short => maximum scale supported * SQL_DATA_TYPE int => unused * SQL_DATETIME_SUB int => unused * NUM_PREC_RADIX int => usually 2 or 10 * The PRECISION column represents the maximum column size that the server supports for the given datatype. For numeric data, this is the maximum precision. For character data, this is the length in characters. For datetime datatypes, this is the length in characters of the String representation (assuming the maximum allowed precision of the fractional seconds component). 
For binary data, this is the length in bytes. For the ROWID datatype, this is the length in bytes. Null is returned for data types where the column size is not applicable. */ if (typeInfoColumnMetaData == null) { int ordinal = 0; typeInfoColumnMetaData = ImmutableList.<JdbcColumnMetaData>builder() .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "TYPE_NAME", //label, "TYPE_NAME", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "DATA_TYPE", //label, "DATA_TYPE", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "PRECISION", //label, "PRECISION", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 64, //displaySize, "LITERAL_PREFIX", //label, "LITERAL_PREFIX", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 64, //displaySize, "LITERAL_SUFFIX", //label, "LITERAL_SUFFIX", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, 
JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 64, //displaySize, "CREATE_PARAMS", //label, "CREATE_PARAMS", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "NULLABLE", //label, "NULLABLE", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "CASE_SENSITIVE", //label, "CASE_SENSITIVE", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.BOOLEAN, //type.id, JdbcType.BOOLEAN.name(), //type.name, boolean.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "SEARCHABLE", //label, "SEARCHABLE", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "UNSIGNED_ATTRIBUTE", //label, "UNSIGNED_ATTRIBUTE", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.BOOLEAN, //type.id, JdbcType.BOOLEAN.name(), //type.name, boolean.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, 
//displaySize, "FIXED_PREC_SCALE", //label, "FIXED_PREC_SCALE", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.BOOLEAN, //type.id, JdbcType.BOOLEAN.name(), //type.name, boolean.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "AUTO_INCREMENT", //label, "AUTO_INCREMENT", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.BOOLEAN, //type.id, JdbcType.BOOLEAN.name(), //type.name, boolean.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNullable, //nullable, true, //signed, 64, //displaySize, "LOCAL_TYPE_NAME", //label, "LOCAL_TYPE_NAME", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.VARCHAR, //type.id, JdbcType.VARCHAR.name(), //type.name, String.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "MINIMUM_SCALE", //label, "MINIMUM_SCALE", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.SMALLINT, //type.id, JdbcType.SMALLINT.name(), //type.name, short.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "MAXIMUM_SCALE", //label, "MAXIMUM_SCALE", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.SMALLINT, //type.id, JdbcType.SMALLINT.name(), //type.name, short.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "SQL_DATA_TYPE", //label, "SQL_DATA_TYPE", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, 
int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal++, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "SQL_DATETIME_SUB", //label, "SQL_DATETIME_SUB", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .add(new JdbcColumnMetaData( ordinal, false, //caseSensitive, ResultSetMetaData.columnNoNulls, //nullable, true, //signed, 64, //displaySize, "NUM_PREC_RADIX", //label, "NUM_PREC_RADIX", //columnName, null, //schemaName, 0, //precision, 0, //scale, Types.INTEGER, //type.id, JdbcType.INTEGER.name(), //type.name, int.class.getName()) //columnClassName ) .build(); } return typeInfoColumnMetaData; } }
4,569
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbResultSet.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.google.common.collect.ImmutableList; import com.mongodb.client.MongoCursor; import org.bson.Document; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.documentdb.jdbc.common.utilities.JdbcColumnMetaData; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import software.amazon.documentdb.jdbc.common.utilities.SqlState; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.Statement; import java.util.List; import java.util.stream.Collectors; /** * DocumentDb implementation of ResultSet. */ public class DocumentDbResultSet extends DocumentDbAbstractResultSet implements java.sql.ResultSet { private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbResultSet.class); private static final int DEFAULT_FETCH_SIZE = 10; // 10 is default fetch size used by most JDBC drivers. private int fetchSize; private int rowIndex = -1; private final MongoCursor<Document> iterator; private Document current; private final List<String> paths; /** * DocumentDbResultSet constructor, initializes super class. 
*/ DocumentDbResultSet( final Statement statement, final MongoCursor<Document> iterator, final ImmutableList<JdbcColumnMetaData> columnMetaData, final List<String> paths) throws SQLException { super(statement, columnMetaData, true); this.iterator = iterator; // Set fetch size to be fetch size of statement if it exists. Otherwise, use default. this.fetchSize = statement != null ? statement.getFetchSize() : DEFAULT_FETCH_SIZE; this.paths = paths; } @Override protected void doClose() { iterator.close(); } /** * Gets the current fetch size. * Getting and setting fetch size is accepted but will not be used for this particular driver. * @return the current fetch size. */ @Override protected int getDriverFetchSize() { return this.fetchSize; } /** * Sets the current fetch size. * Getting and setting fetch size is accepted but will not be used for this particular driver. * @param rows The number of rows for the driver to fetch. */ @Override protected void setDriverFetchSize(final int rows) { this.fetchSize = rows; } @Override protected int getRowIndex() { return rowIndex; } @Override protected int getRowCount() throws SQLFeatureNotSupportedException { throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.RESULT_FORWARD_ONLY); } @Override public boolean isLast() throws SQLException { verifyOpen(); return (current != null && !iterator.hasNext()); } @Override public boolean isAfterLast() throws SQLException { verifyOpen(); return (current == null && !iterator.hasNext()); } @Override public boolean next() throws SQLException { verifyOpen(); if (iterator.hasNext()) { current = iterator.next(); rowIndex++; return true; } else { current = null; return false; } } @Override protected Object getValue(final int columnIndex) throws SQLException { final ResultSetMetaData metadata = getMetaData(); final String path = paths.get(columnIndex - 1); if (path == null || path.isEmpty()) { throw SqlError.createSQLException(LOGGER, SqlState.DATA_EXCEPTION, 
SqlError.CANNOT_RETRIEVE_COLUMN, metadata.getColumnName(columnIndex)); } final String[] segmentedPath = path.split("\\."); Object segmentValue = current.get(segmentedPath[0]); for (int j = 1; j < segmentedPath.length && segmentValue instanceof Document; j++) { segmentValue = ((Document) segmentValue).get(segmentedPath[j]); } // Apache converters cannot handle the following types, must be specifically converted. if (segmentValue instanceof Document) { return ((Document) segmentValue).toJson(); } if (segmentValue instanceof List) { final List<?> modifiedList = ((List<?>) segmentValue) .stream() .map(o1 -> o1 instanceof Document ? ((Document) o1).toJson() : o1) .collect(Collectors.toList()); return modifiedList.toString(); } return segmentValue; } }
4,570
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbAllowDiskUseOption.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; /** * The enumeration of Allow Disk Use options. */ public enum DocumentDbAllowDiskUseOption { DEFAULT("default"), DISABLE("disable"), ENABLE("enable"), ; private final String name; DocumentDbAllowDiskUseOption(final String name) { this.name = name; } public String getName() { return name; } /** * Returns DocumentDbAllowDiskUseOption with a name that matches input string. * @param allowDiskUseOption name of the allow disk use option. * @return DocumentDbAllowDiskUseOption of string. */ public static DocumentDbAllowDiskUseOption fromString(final String allowDiskUseOption) { for (DocumentDbAllowDiskUseOption scanMethod: DocumentDbAllowDiskUseOption.values()) { if (scanMethod.name.equals(allowDiskUseOption)) { return scanMethod; } } throw new IllegalArgumentException("Invalid allow disk use option."); } }
4,571
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbConnection.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc;

import com.mongodb.MongoCommandException;
import com.mongodb.MongoSecurityException;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoDatabase;
import lombok.SneakyThrows;
import org.bson.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.Connection;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata;
import software.amazon.documentdb.jdbc.sshtunnel.DocumentDbSshTunnelClient;

import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;

import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperty.REFRESH_SCHEMA;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_LATEST_OR_NEW;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_NEW;

/**
 * DocumentDb implementation of Connection.
 */
public class DocumentDbConnection extends Connection
        implements java.sql.Connection {

    private static final Logger LOGGER =
            LoggerFactory.getLogger(DocumentDbConnection.class.getName());

    /**
     * Names of connection properties whose values must never appear in the log.
     * <p>
     * BUG FIX (review): the original set listed {@code PASSWORD} twice — an
     * apparent copy/paste error. The duplicate entry is replaced with the SSH
     * private key passphrase (the other secret-valued property) so it is also
     * redacted from the debug-level property dump in the constructor.
     */
    private static final Set<String> SECRET_PROPERTIES =
            Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
                    DocumentDbConnectionProperty.PASSWORD.getName(),
                    DocumentDbConnectionProperty.SSH_PRIVATE_KEY_PASSPHRASE.getName())));

    private final DocumentDbConnectionProperties connectionProperties;
    // Lazily-built JDBC metadata; created on first getMetaData() call.
    private DocumentDbDatabaseMetaData metadata;
    // Lazily-built SQL schema metadata backing the JDBC metadata above.
    private DocumentDbDatabaseSchemaMetadata databaseMetadata;
    private MongoClient mongoClient = null;
    private MongoDatabase mongoDatabase = null;
    // Non-null only while an internal SSH tunnel is in use for this connection.
    private DocumentDbSshTunnelClient sshTunnelClient;

    /**
     * DocumentDbConnection constructor, initializes super class.
     * <p>
     * Logs the (redacted) connection properties at debug level, starts an
     * internal SSH tunnel if the properties request one, then creates the
     * MongoDB client and pings the target database.
     *
     * @param connectionProperties the validated connection properties.
     * @throws SQLException if the SSH tunnel cannot be established or the
     *         database cannot be reached/authenticated.
     */
    DocumentDbConnection(final DocumentDbConnectionProperties connectionProperties)
            throws SQLException {
        super(connectionProperties);
        this.connectionProperties = connectionProperties;
        if (LOGGER.isDebugEnabled()) {
            final StringBuilder sb = new StringBuilder();
            sb.append("Creating connection with following properties:");
            for (String propertyName : connectionProperties.stringPropertyNames()) {
                if (!SECRET_PROPERTIES.contains(propertyName)) {
                    sb.append(String.format("%n Connection property %s=%s",
                            propertyName, connectionProperties.get(propertyName).toString()));
                }
            }
            LOGGER.debug(sb.toString());
        }
        if (connectionProperties.enableSshTunnel()) {
            ensureSshTunnel(connectionProperties);
        } else {
            LOGGER.debug("Internal SSH tunnel not used.");
        }
        initializeClients(connectionProperties);
    }

    /**
     * Ensures an SSH Tunnel service is started for this set of SSH Tunnel properties, or confirms
     * an SSH Tunnel is already running. It ensures an SSH Tunnel client session is active and also ensures the
     * SSH Tunnel's listening port is valid.
     *
     * @param connectionProperties the connection properties to use for the SSH Tunnel.
     * @throws SQLException when unable to ensure an SSH Tunnel is started.
     */
    private void ensureSshTunnel(final DocumentDbConnectionProperties connectionProperties)
            throws SQLException {
        try {
            this.sshTunnelClient = new DocumentDbSshTunnelClient(connectionProperties);
        } catch (SQLException e) {
            // Already the right exception type — rethrow unwrapped.
            throw e;
        } catch (Exception e) {
            throw SqlError.createSQLException(LOGGER,
                    SqlState.CONNECTION_EXCEPTION,
                    e,
                    SqlError.SSH_TUNNEL_ERROR,
                    e.getMessage());
        }
    }

    /**
     * Gets the ssh tunnel local port.
     *
     * @return the ssh tunnel local port if it exists; 0 otherwise.
     */
    public int getSshLocalPort() {
        // Get the port from the SSH tunnel session, if it exists.
        if (isSshTunnelActive()) {
            return sshTunnelClient.getServiceListeningPort();
        }
        return 0;
    }

    /**
     * Get whether the SSH tunnel is active.
     *
     * @return returns {@code true} if the SSH tunnel is active, {@code false}, otherwise.
     */
    @SneakyThrows
    public boolean isSshTunnelActive() {
        // indicate whether the SSH tunnel is enabled
        return sshTunnelClient != null && sshTunnelClient.getServiceListeningPort() > 0;
    }

    /**
     * Checks connection liveness by issuing a bounded {@code ping} command.
     *
     * @param timeout the time in seconds to wait; must be non-negative.
     * @return {@code true} if the ping succeeds within the timeout.
     * @throws SQLException if {@code timeout} is negative.
     */
    @Override
    public boolean isValid(final int timeout) throws SQLException {
        if (timeout < 0) {
            throw SqlError.createSQLException(LOGGER,
                    SqlState.INVALID_PARAMETER_VALUE,
                    SqlError.INVALID_TIMEOUT,
                    timeout);
        }
        if (mongoDatabase != null) {
            try {
                // Convert to milliseconds
                final long maxTimeMS = TimeUnit.SECONDS.toMillis(timeout);
                pingDatabase(maxTimeMS);
                return true;
            } catch (Exception e) {
                // Per JDBC, a failed liveness probe returns false rather than throwing.
                LOGGER.error(e.getMessage(), e);
            }
        }
        return false;
    }

    /**
     * Releases the MongoDB client and, if present, the SSH tunnel client.
     *
     * @throws SQLException if closing the SSH tunnel fails.
     */
    @Override
    public void doClose() throws SQLException {
        if (mongoDatabase != null) {
            mongoDatabase = null;
        }
        if (mongoClient != null) {
            mongoClient.close();
            mongoClient = null;
        }
        if (sshTunnelClient != null) {
            try {
                sshTunnelClient.close();
            } catch (SQLException e) {
                throw e;
            } catch (Exception e) {
                throw SqlError.createSQLException(LOGGER,
                        SqlState.CONNECTION_EXCEPTION,
                        e,
                        SqlError.SSH_TUNNEL_ERROR,
                        e.getMessage());
            } finally {
                // Null the field even on failure so close() is not retried on a dead client.
                sshTunnelClient = null;
            }
        }
    }

    @SneakyThrows
    @Override
    public DatabaseMetaData getMetaData() throws SQLException {
        ensureDatabaseMetadata();
        return metadata;
    }

    public MongoClient getMongoClient() {
        return mongoClient;
    }

    /**
     * Builds the database metadata on first use. Honors the refreshSchema
     * option by forcing a new schema version when it is enabled.
     *
     * @throws SQLException if the schema metadata cannot be retrieved or generated.
     */
    private void ensureDatabaseMetadata() throws SQLException {
        if (metadata == null) {
            final int version;
            if (connectionProperties.getRefreshSchema()) {
                version = VERSION_NEW;
                LOGGER.warn("The '{}' option is enabled and will cause a new"
                        + " version of the SQL schema to be generated."
                        + " This can lead to poor performance."
                        + " Please disable this option when it is no longer needed.",
                        REFRESH_SCHEMA.getName());
            } else {
                version = VERSION_LATEST_OR_NEW;
            }
            setMetadata(version);
        }
    }

    /**
     * Retrieves (or generates) the schema metadata at the given version and
     * wraps it in the JDBC database metadata object.
     *
     * @param version the schema version to retrieve ({@code VERSION_NEW} forces regeneration).
     * @throws SQLException if the schema metadata cannot be retrieved or generated.
     */
    private void setMetadata(final int version) throws SQLException {
        databaseMetadata = DocumentDbDatabaseSchemaMetadata.get(
                connectionProperties,
                connectionProperties.getSchemaName(),
                version,
                getMongoClient());
        metadata = new DocumentDbDatabaseMetaData(this, databaseMetadata, connectionProperties);
    }

    void refreshDatabaseMetadata() throws SQLException {
        setMetadata(VERSION_NEW);
    }

    DocumentDbDatabaseSchemaMetadata getDatabaseMetadata() throws SQLException {
        ensureDatabaseMetadata();
        return databaseMetadata;
    }

    @Override
    public String getSchema() {
        return connectionProperties.getDatabase();
    }

    @Override
    public int getNetworkTimeout() throws SQLException {
        // TODO: Implement network timeout.
        throw new SQLFeatureNotSupportedException();
    }

    @Override
    public void setNetworkTimeout(final Executor executor, final int milliseconds)
            throws SQLException {
        // TODO: Implement network timeout.
        throw new SQLFeatureNotSupportedException();
    }

    /**
     * Creates a statement. Only forward-only, read-only result sets are supported.
     *
     * @throws SQLException if the connection is closed or an unsupported result
     *         set type/concurrency is requested.
     */
    @Override
    public java.sql.Statement createStatement(final int resultSetType,
            final int resultSetConcurrency)
            throws SQLException {
        verifyOpen();
        if (resultSetType != ResultSet.TYPE_FORWARD_ONLY
                || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) {
            throw SqlError.createSQLFeatureNotSupportedException(LOGGER,
                    SqlError.UNSUPPORTED_RESULT_SET_TYPE);
        }
        return new DocumentDbStatement(this);
    }

    /**
     * Creates a prepared statement. Only forward-only, read-only result sets are supported.
     *
     * @throws SQLException if the connection is closed or an unsupported result
     *         set type/concurrency is requested.
     */
    @Override
    public PreparedStatement prepareStatement(final String sql, final int resultSetType,
            final int resultSetConcurrency)
            throws SQLException {
        verifyOpen();
        if (resultSetType != ResultSet.TYPE_FORWARD_ONLY
                || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) {
            throw SqlError.createSQLFeatureNotSupportedException(LOGGER,
                    SqlError.UNSUPPORTED_RESULT_SET_TYPE);
        }
        return new DocumentDbPreparedStatement(this, sql);
    }

    @Override
    public boolean isSupportedProperty(final String name) {
        return DocumentDbConnectionProperty.isSupportedProperty(name);
    }

    DocumentDbConnectionProperties getConnectionProperties() {
        return connectionProperties;
    }

    /**
     * Creates the MongoDB client (routed through the SSH tunnel port when one
     * is active) and verifies connectivity with a ping.
     *
     * @param connectionProperties the connection properties to build the client from.
     * @throws SQLException if the database cannot be reached or authenticated.
     */
    private void initializeClients(final DocumentDbConnectionProperties connectionProperties)
            throws SQLException {
        // Create the mongo client.
        mongoClient = connectionProperties.createMongoClient(getSshLocalPort());
        mongoDatabase = mongoClient.getDatabase(connectionProperties.getDatabase());
        pingDatabase();
    }

    private void pingDatabase() throws SQLException {
        pingDatabase(0);
    }

    /**
     * Runs the {@code ping} command against the target database, optionally
     * bounded by {@code maxTimeMS}, translating driver security failures into
     * descriptive SQL exceptions.
     *
     * @param maxTimeMS maximum server-side execution time in milliseconds; 0 means unbounded.
     * @throws SQLException on authorization failure, other security errors, or
     *         any other ping failure.
     */
    private void pingDatabase(final long maxTimeMS) throws SQLException {
        try {
            final String maxTimeMSOption = (maxTimeMS > 0)
                    ? String.format(", \"maxTimeMS\" : %d", maxTimeMS)
                    : "";
            mongoDatabase.runCommand(
                    Document.parse(String.format("{ \"ping\" : 1 %s }", maxTimeMSOption)));
        } catch (MongoSecurityException e) {
            // Check specifically for authorization error.
            if (e.getCode() == -4
                    && e.getCause() != null
                    && e.getCause() instanceof MongoCommandException
                    && ((MongoCommandException) e.getCause()).getCode() == 18) {
                throw SqlError.createSQLException(LOGGER,
                        SqlState.INVALID_AUTHORIZATION_SPECIFICATION,
                        e,
                        SqlError.AUTHORIZATION_ERROR,
                        mongoDatabase.getName(),
                        e.getCredential().getUserName(),
                        e.getCredential().getSource(),
                        e.getCredential().getMechanism());
            }
            // Everything else.
            throw SqlError.createSQLException(LOGGER,
                    SqlState.SQL_CLIENT_UNABLE_TO_ESTABLISH_SQL_CONNECTION,
                    e,
                    SqlError.SECURITY_ERROR,
                    e.getMessage());
        } catch (Exception e) {
            throw new SQLException(e.getMessage(), e);
        }
    }
}
4,572
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbListResultSet.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc;

import com.google.common.collect.ImmutableList;
import software.amazon.documentdb.jdbc.common.utilities.JdbcColumnMetaData;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;

/**
 * A scroll-insensitive, read-only result set backed by an in-memory list of rows.
 * <p>
 * Rows are addressed by a zero-based {@code rowIndex}; {@code -1} means
 * "before first" and {@code rowCount} means "after last".
 */
class DocumentDbListResultSet extends DocumentDbAbstractResultSet {
    // In-memory rows: outer list is rows, inner list is column values (0-indexed).
    private final List<List<Object>> metaData;
    private final int rowCount;
    // Zero-based cursor; -1 = before first, rowCount = after last.
    private int rowIndex = -1;

    DocumentDbListResultSet(
            final Statement statement,
            final ImmutableList<JdbcColumnMetaData> columnMetaData,
            final List<List<Object>> metaData) {
        super(statement, columnMetaData);
        this.metaData = metaData;
        this.rowCount = metaData.size();
    }

    @Override
    protected Object getValue(final int columnIndex) {
        // JDBC column indexes are 1-based; internal lists are 0-based.
        return metaData.get(getRowIndex()).get(columnIndex - 1);
    }

    @Override
    protected void doClose() {
        // no op
    }

    @Override
    protected int getDriverFetchSize() {
        return 0;
    }

    @Override
    protected void setDriverFetchSize(final int rows) {
    }

    @Override
    protected int getRowIndex() {
        // zero-indexed
        return rowIndex;
    }

    @Override
    protected int getRowCount() {
        return rowCount;
    }

    @Override
    public boolean next() throws SQLException {
        verifyOpen();
        if (getRowIndex() < getRowCount()) {
            rowIndex++;
        }
        return getRowIndex() < getRowCount();
    }

    @Override
    public boolean isBeforeFirst() {
        return getRowIndex() < 0;
    }

    @Override
    public boolean isAfterLast() {
        return getRowIndex() >= getRowCount();
    }

    @Override
    public boolean isFirst() {
        return rowIndex == 0;
    }

    @Override
    public boolean isLast() {
        return getRowIndex() == getRowCount() - 1;
    }

    @Override
    public void beforeFirst() {
        rowIndex = -1;
    }

    @Override
    public void afterLast() {
        rowIndex = getRowCount();
    }

    @Override
    public boolean first() {
        rowIndex = 0;
        return getRowIndex() < getRowCount();
    }

    @Override
    public boolean last() {
        rowIndex = getRowCount() - 1;
        return getRowIndex() >= 0;
    }

    /**
     * Moves the cursor to the given row number per the JDBC contract:
     * positive rows count from the front (1 = first), negative rows count from
     * the end (-1 = last), and 0 positions the cursor before the first row.
     * <p>
     * BUG FIX (review): the original used {@code row < getRowCount()}, so
     * {@code absolute(rowCount)} — the last row, which is valid — wrongly reset
     * the cursor to before-first and returned {@code false}. Additionally, the
     * JDBC specification requires an out-of-range positive row to leave the
     * cursor after the last row, and an out-of-range negative row to leave it
     * before the first row.
     *
     * @param row the row number to move to.
     * @return {@code true} if the cursor is on a valid row; {@code false} otherwise.
     */
    @Override
    public boolean absolute(final int row) {
        if (row > 0) {
            if (row <= getRowCount()) {
                rowIndex = row - 1;
                return true;
            }
            // Positive row beyond the last row: cursor moves after the last row.
            rowIndex = getRowCount();
            return false;
        } else if (row < 0) {
            if (getRowCount() + row >= 0) {
                rowIndex = getRowCount() + row;
                return true;
            }
            // Negative row beyond the first row: cursor moves before the first row.
            rowIndex = -1;
            return false;
        } else {
            // absolute(0) positions the cursor before the first row.
            rowIndex = -1;
            return false;
        }
    }

    @Override
    public boolean relative(final int rows) {
        final int proposedRowIndex = getRowIndex() + rows;
        if (proposedRowIndex < 0) {
            rowIndex = -1;
            return false;
        } else if (proposedRowIndex >= getRowCount()) {
            rowIndex = getRowCount();
            return false;
        }
        rowIndex = proposedRowIndex;
        return true;
    }

    @Override
    public boolean previous() {
        if (getRowIndex() >= 0) {
            rowIndex--;
        }
        return getRowIndex() >= 0;
    }

    @Override
    public int getType() {
        return ResultSet.TYPE_SCROLL_INSENSITIVE;
    }

    @Override
    public int getConcurrency() {
        return ResultSet.CONCUR_READ_ONLY;
    }
}
4,573
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbConnectionProperties.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Strings; import com.mongodb.MongoClientSettings; import com.mongodb.MongoCredential; import com.mongodb.MongoDriverInformation; import com.mongodb.ReadPreference; import com.mongodb.ServerAddress; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoClients; import com.mongodb.connection.SslSettings; import com.mongodb.event.ServerMonitorListener; import lombok.SneakyThrows; import nl.altindag.ssl.SSLFactory; import nl.altindag.ssl.util.CertificateUtils; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import software.amazon.documentdb.jdbc.common.utilities.SqlState; import javax.net.ssl.SSLContext; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.InvalidPathException; import java.nio.file.Path; import java.nio.file.Paths; import java.security.cert.Certificate; import java.sql.SQLException; import java.util.ArrayList; 
import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Properties; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; public class DocumentDbConnectionProperties extends Properties { public static final String DOCUMENT_DB_SCHEME = "jdbc:documentdb:"; public static final String USER_HOME_PROPERTY = "user.home"; private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbConnectionProperties.class.getName()); private static final Pattern WHITE_SPACE_PATTERN = Pattern.compile("^\\s*$"); private static final String ROOT_2019_PEM_RESOURCE_FILE_NAME = "/rds-ca-2019-root.pem"; private static final String ROOT_2021_PEM_RESOURCE_FILE_NAME = "/rds-prod-root-ca-2021.pem"; public static final String HOME_PATH_PREFIX_REG_EXPR = "^~[/\\\\].*$"; public static final int FETCH_SIZE_DEFAULT = 2000; public static final String DOCUMENTDB_CUSTOM_OPTIONS = "DOCUMENTDB_CUSTOM_OPTIONS"; private static String[] documentDbSearchPaths = null; static final String DEFAULT_APPLICATION_NAME; public static final String USER_HOME_PATH_NAME = System.getProperty(USER_HOME_PROPERTY); public static final String DOCUMENTDB_HOME_PATH_NAME = Paths.get( USER_HOME_PATH_NAME, ".documentdb").toString(); public static final String CONNECTION_STRING_TEMPLATE = "//%s%s/%s%s"; static { DEFAULT_APPLICATION_NAME = DocumentDbDriver.DEFAULT_APPLICATION_NAME; } /** * Enumeration of type of validation. */ public enum ValidationType { /** * No validation. */ NONE, /** * Validate client connection required properties. */ CLIENT, /** * Validate SSH tunnel required properties. */ SSH_TUNNEL, } /** * Constructor for DocumentDbConnectionProperties, initializes with given properties. * * @param properties Properties to initialize with. */ public DocumentDbConnectionProperties(final Properties properties) { // Copy properties. this.putAll(properties); } /** * Constructor for DocumentDbConnectionProperties. 
Initialized with empty properties. */ public DocumentDbConnectionProperties() { super(); } /** * Gets the search paths when trying to locate the SSH private key file. * * @return an array of search paths. */ public static String[] getDocumentDbSearchPaths() { if (documentDbSearchPaths == null) { documentDbSearchPaths = new String[]{ USER_HOME_PATH_NAME, DOCUMENTDB_HOME_PATH_NAME, getClassPathLocation(), }; } return documentDbSearchPaths.clone(); } /** * Gets the parent folder location of the current class. * * @return a string representing the parent folder location of the current class. */ public static String getClassPathLocation() { String classPathLocation = null; final URL classPathLocationUrl = DocumentDbConnectionProperties.class .getProtectionDomain() .getCodeSource() .getLocation(); Path classPath = null; try { // Attempt to get file path from URL path. classPath = Paths.get(classPathLocationUrl.getPath()); } catch (InvalidPathException e) { try { // If we fail to get path from URL, try the URI. classPath = Paths.get(classPathLocationUrl.toURI()); } catch (IllegalArgumentException | URISyntaxException ex) { LOGGER.error(ex.getMessage(), ex); // Ignore error, return null. } } if (classPath != null) { final Path classParentPath = classPath.getParent(); if (classParentPath != null) { classPathLocation = classParentPath.toString(); } } return classPathLocation; } /** * Return MongoDriverInformation object. It will initialize the Object with application name * and driver version. * * @return MongoDriverInformation */ private MongoDriverInformation getMongoDriverInformation() { final MongoDriverInformation mongoDriverInformation = MongoDriverInformation.builder() .driverName(getApplicationName()) .driverVersion(DocumentDbDriver.DRIVER_VERSION) .build(); return mongoDriverInformation; } /** * Gets the hostname. * * @return The hostname to connect to. 
*/ public String getHostname() { return getProperty(DocumentDbConnectionProperty.HOSTNAME.getName()); } /** * Sets the hostname. * * @param hostname The hostname to connect to. */ public void setHostname(final String hostname) { setProperty(DocumentDbConnectionProperty.HOSTNAME.getName(), hostname); } /** * Gets the username. * * @return The username to authenticate with. */ public String getUser() { return getProperty(DocumentDbConnectionProperty.USER.getName()); } /** * Sets the user. * * @param user The username to authenticate with. */ public void setUser(final String user) { setProperty(DocumentDbConnectionProperty.USER.getName(), user); } /** * Gets the password. * * @return The password to authenticate with. */ public String getPassword() { return getProperty(DocumentDbConnectionProperty.PASSWORD.getName()); } /** * Sets the password. * * @param password The password to authenticate with. */ public void setPassword(final String password) { setProperty(DocumentDbConnectionProperty.PASSWORD.getName(), password); } /** * Gets the database name. * * @return The database to connect to. */ public String getDatabase() { return getProperty(DocumentDbConnectionProperty.DATABASE.getName()); } /** * Sets the database name. * * @param database The database to connect to. */ public void setDatabase(final String database) { setProperty(DocumentDbConnectionProperty.DATABASE.getName(), database); } /** * Gets the application name. * * @return The name of the application. */ public String getApplicationName() { return getProperty( DocumentDbConnectionProperty.APPLICATION_NAME.getName(), DocumentDbConnectionProperty.APPLICATION_NAME.getDefaultValue()); } /** * Sets the application name. * * @param applicationName The name of the application. */ public void setApplicationName(final String applicationName) { setProperty(DocumentDbConnectionProperty.APPLICATION_NAME.getName(), applicationName); } /** * Gets the replica set name. 
* * @return The name of the replica set to connect to. */ public String getReplicaSet() { return getProperty(DocumentDbConnectionProperty.REPLICA_SET.getName()); } /** * Sets the replica set name. * * @param replicaSet The name of the replica set to connect to. */ public void setReplicaSet(final String replicaSet) { setProperty(DocumentDbConnectionProperty.REPLICA_SET.getName(), replicaSet); } /** * Gets TLS enabled flag. * * @return tlsEnabled {@code true} if TLS/SSL is enabled; {@code false} otherwise. */ public boolean getTlsEnabled() { return Boolean.parseBoolean( getProperty( DocumentDbConnectionProperty.TLS_ENABLED.getName(), DocumentDbConnectionProperty.TLS_ENABLED.getDefaultValue())); } /** * Sets TLS enabled flag. * * @param tlsEnabled {@code true} if TLS/SSL is enabled; {@code false} otherwise. */ public void setTlsEnabled(final String tlsEnabled) { setProperty(DocumentDbConnectionProperty.TLS_ENABLED.getName(), tlsEnabled); } /** * Gets allow invalid hostnames flag for TLS connections. * * @return {@code true} if invalid host names are allowed; {@code false} otherwise. */ public boolean getTlsAllowInvalidHostnames() { return Boolean.parseBoolean( getProperty( DocumentDbConnectionProperty.TLS_ALLOW_INVALID_HOSTNAMES.getName(), DocumentDbConnectionProperty.TLS_ALLOW_INVALID_HOSTNAMES .getDefaultValue())); } /** * Sets allow invalid hostnames flag for TLS connections. * * @param allowInvalidHostnames Whether invalid hostnames are allowed when connecting with * TLS/SSL. */ public void setTlsAllowInvalidHostnames(final String allowInvalidHostnames) { setProperty( DocumentDbConnectionProperty.TLS_ALLOW_INVALID_HOSTNAMES.getName(), allowInvalidHostnames); } /** * Gets retry reads flag. * * @return {@code true} if the driver should retry read operations if they fail due to a network * error; {@code false} otherwise. 
*/ public Boolean getRetryReadsEnabled() { return Boolean.parseBoolean( getProperty( DocumentDbConnectionProperty.RETRY_READS_ENABLED.getName(), DocumentDbConnectionProperty.RETRY_READS_ENABLED.getDefaultValue())); } /** * Sets retry reads flag. * * @param retryReadsEnabled Whether the driver should retry read operations if they fail due to * a network error */ public void setRetryReadsEnabled(final String retryReadsEnabled) { setProperty(DocumentDbConnectionProperty.RETRY_READS_ENABLED.getName(), retryReadsEnabled); } /** * Get the timeout for opening a connection. * * @return The connection timeout in seconds. */ public Integer getLoginTimeout() { return getPropertyAsInteger(DocumentDbConnectionProperty.LOGIN_TIMEOUT_SEC.getName()); } /** * Sets the timeout for opening a connection. * * @param timeout The connection timeout in seconds. */ public void setLoginTimeout(final String timeout) { setProperty(DocumentDbConnectionProperty.LOGIN_TIMEOUT_SEC.getName(), timeout); } /** * Gets the read preference when connecting as a replica set. * * @return The read preference as a ReadPreference object. */ public DocumentDbReadPreference getReadPreference() { return getPropertyAsReadPreference(DocumentDbConnectionProperty.READ_PREFERENCE.getName()); } /** * Sets the read preference when connecting as a replica set. * * @param readPreference The name of the read preference. */ public void setReadPreference(final String readPreference) { setProperty(DocumentDbConnectionProperty.READ_PREFERENCE.getName(), readPreference); } /** * Gets the method of scanning for metadata. * * @return The method of scanning for metadata. */ public DocumentDbMetadataScanMethod getMetadataScanMethod() { return getPropertyAsScanMethod(DocumentDbConnectionProperty.METADATA_SCAN_METHOD.getName()); } /** * Sets the method of scanning for metadata. * * @param method The name of the scan method. 
*/ public void setMetadataScanMethod(final String method) { setProperty(DocumentDbConnectionProperty.METADATA_SCAN_METHOD.getName(), method); } /** * Gets the number of records to scan while determining schema. * * @return Integer representing the number of records to scan. */ public int getMetadataScanLimit() { return getPropertyAsInteger(DocumentDbConnectionProperty.METADATA_SCAN_LIMIT.getName()); } /** * Sets the number of records to scan while determining schema. * * @param limit The name of the read preference. */ public void setMetadataScanLimit(final String limit) { setProperty(DocumentDbConnectionProperty.METADATA_SCAN_LIMIT.getName(), limit); } /** * Gets the schema name for persisted schema. * * @return the name of the schema. */ public String getSchemaName() { return getProperty(DocumentDbConnectionProperty.SCHEMA_NAME.getName(), DocumentDbConnectionProperty.SCHEMA_NAME.getDefaultValue()); } /** * Sets the schema name for persisted schema. * * @param schemaName the name of the schema. */ public void setSchemaName(final String schemaName) { setProperty(DocumentDbConnectionProperty.SCHEMA_NAME.getName(), schemaName); } /** * Sets the TLS CA file path. * * @param tlsCAFilePath the TLS CA file path. */ public void setTlsCAFilePath(final String tlsCAFilePath) { setProperty(DocumentDbConnectionProperty.TLS_CA_FILE.getName(), tlsCAFilePath); } /** * Gets the TLS CA file path. * * @return a String representing the TLS CA file path, if set, null otherwise. */ public String getTlsCAFilePath() { return getProperty(DocumentDbConnectionProperty.TLS_CA_FILE.getName()); } /** * Sets the SSH tunnel user. * * @param sshUser the SSH tunnel user. */ public void setSshUser(final String sshUser) { setProperty(DocumentDbConnectionProperty.SSH_USER.getName(), sshUser); } /** * Gets the SSH tunnel user. * * @return the SSH tunnel user. */ public String getSshUser() { return getProperty(DocumentDbConnectionProperty.SSH_USER.getName()); } /** * Sets the SSH tunnel host name. 
Can optionally contain the port number using 'host-name:port' * syntax. If port is not provided, port 22 is assumed. * * @param sshHostname the SSH tunnel host name and optional port number. */ public void setSshHostname(final String sshHostname) { setProperty(DocumentDbConnectionProperty.SSH_HOSTNAME.getName(), sshHostname); } /** * Gets the SSH tunnel host name and optional port number. * * @return the SSH tunnel host name and optional port number. */ public String getSshHostname() { return getProperty(DocumentDbConnectionProperty.SSH_HOSTNAME.getName()); } /** * Sets the file path of the private key file. Can be prefixed with '~' to indicate the * current user's home directory. * * @param sshPrivateKeyFile the file path of the private key file. */ public void setSshPrivateKeyFile(final String sshPrivateKeyFile) { setProperty(DocumentDbConnectionProperty.SSH_PRIVATE_KEY_FILE.getName(), sshPrivateKeyFile); } /** * Gets the file path of the private key file. * * @return the file path of the private key file. */ public String getSshPrivateKeyFile() { return getProperty(DocumentDbConnectionProperty.SSH_PRIVATE_KEY_FILE.getName()); } /** * Sets the passphrase of the private key file. If not set, no passphrase will be used. * * @param sshPrivateKeyPassphrase the passphrase of the private key file */ public void setSshPrivateKeyPassphrase(final String sshPrivateKeyPassphrase) { setProperty( DocumentDbConnectionProperty.SSH_PRIVATE_KEY_PASSPHRASE.getName(), sshPrivateKeyPassphrase); } /** * Gets the passphrase of the private key file. * * @return the passphrase of the private key file */ public String getSshPrivateKeyPassphrase() { return getProperty(DocumentDbConnectionProperty.SSH_PRIVATE_KEY_PASSPHRASE.getName()); } /** * Sets the indicator for whether the SSH tunnel will perform strict host key checking. When * {@code true}, the 'known_hosts' file is checked to ensure the hashed host key is the same * as the target host. 
* * @param sshStrictHostKeyChecking the indicator for whether the SSH tunnel will perform strict * host key checking. */ public void setSshStrictHostKeyChecking(final String sshStrictHostKeyChecking) { setProperty( DocumentDbConnectionProperty.SSH_STRICT_HOST_KEY_CHECKING.getName(), String.valueOf(Boolean.parseBoolean(sshStrictHostKeyChecking))); } /** * Gets the indicator for whether the SSH tunnel will perform strict host key checking. * * @return the indicator for whether the SSH tunnel will perform strict host key checking. */ public boolean getSshStrictHostKeyChecking() { return Boolean.parseBoolean(getProperty( DocumentDbConnectionProperty.SSH_STRICT_HOST_KEY_CHECKING.getName(), DocumentDbConnectionProperty.SSH_STRICT_HOST_KEY_CHECKING.getDefaultValue())); } /** * Gets the file path to the 'known_hosts' file. If not set, '~/.ssh/known_hosts' is assumed. * * @param sshKnownHostsFile the file path to the 'known_hosts' file. */ public void setSshKnownHostsFile(final String sshKnownHostsFile) { setProperty(DocumentDbConnectionProperty.SSH_KNOWN_HOSTS_FILE.getName(), sshKnownHostsFile); } /** * Gets the file path to the 'known_hosts' file. * * @return the file path to the 'known_hosts' file. */ public String getSshKnownHostsFile() { return getProperty(DocumentDbConnectionProperty.SSH_KNOWN_HOSTS_FILE.getName()); } /** * Sets the default fetch size (in records) when retrieving results from Amazon DocumentDB. * It is the number of records to retrieve in a single batch. * The maximum number of records retrieved in a single batch may also be limited by the overall * memory size of the result. The value can be changed by calling the `Statement.setFetchSize` * JDBC method. Default is '2000'. * * @param defaultFetchSize the default fetch size (in records) when retrieving results from Amazon DocumentDB. 
*/ public void setDefaultFetchSize(final String defaultFetchSize) { setProperty(DocumentDbConnectionProperty.DEFAULT_FETCH_SIZE.getName(), defaultFetchSize); } /** * Gets the default fetch size (in records) when retrieving results from Amazon DocumentDB. * It is the number of records to retrieve in a single batch. * The maximum number of records retrieved in a single batch may also be limited by the overall * memory size of the result. The value can be changed by calling the `Statement.setFetchSize` * JDBC method. Default is '2000'. * * @return the default fetch size (in records) when retrieving results from Amazon DocumentDB. */ public Integer getDefaultFetchSize() { return getPropertyAsInteger(DocumentDbConnectionProperty.DEFAULT_FETCH_SIZE.getName()); } /** * Sets indicator of whether to refresh any existing schema with a newly generated schema when * the connection first requires the schema. Note that this will remove any existing schema * customizations and will reduce performance for the first query or metadata inquiry. * * @param refreshSchema indicator of whether to refresh any existing schema with a newly * generated schema when the connection first requires the schema. * Note that this will remove any existing schema customizations and * will reduce performance for the first query or metadata inquiry. */ public void setRefreshSchema(final String refreshSchema) { setProperty(DocumentDbConnectionProperty.REFRESH_SCHEMA.getName(), refreshSchema); } /** * Gets indicator of whether to refresh any existing schema with a newly generated schema when * the connection first requires the schema. Note that this will remove any existing schema * customizations and will reduce performance for the first query or metadata inquiry. * * @return indicator of whether to refresh any existing schema with a newly generated schema * when the connection first requires the schema. 
Note that this will remove any * existing schema customizations and will reduce performance for the first query or * metadata inquiry. */ public Boolean getRefreshSchema() { return Boolean.parseBoolean(getProperty( DocumentDbConnectionProperty.REFRESH_SCHEMA.getName(), DocumentDbConnectionProperty.REFRESH_SCHEMA.getDefaultValue())); } /** * Sets the default authentication database name. * * @param databaseName the name of the authentication database. */ public void setDefaultAuthenticationDatabase(final String databaseName) { setProperty(DocumentDbConnectionProperty.DEFAULT_AUTH_DB.getName(), databaseName); } /** * Gets the default authentication database name. * * @return the name of the authentication database. */ public String getDefaultAuthenticationDatabase() { return getProperty( DocumentDbConnectionProperty.DEFAULT_AUTH_DB.getName(), DocumentDbConnectionProperty.DEFAULT_AUTH_DB.getDefaultValue()); } /** * Sets the allow disk use option. * * @param allowDiskUseOption the disk use option to set. */ public void setAllowDiskUseOption(final String allowDiskUseOption) { setProperty(DocumentDbConnectionProperty.ALLOW_DISK_USE.getName(), allowDiskUseOption); } /** * Gets the allow disk use option. * * @return the disk use option, or null, if invalid or not set. */ public DocumentDbAllowDiskUseOption getAllowDiskUseOption() { return getPropertyAsAllowDiskUseOption(DocumentDbConnectionProperty.ALLOW_DISK_USE.getName()); } /** * Creates a {@link MongoClient} instance from the connection properties. * * @return a new instance of a {@link MongoClient}. */ public MongoClient createMongoClient() { return MongoClients.create( buildMongoClientSettings(), getMongoDriverInformation()); } /** * Creates a {@link MongoClient} instance from the connection properties using * the SSH tunnel port on the local host. * * @return a new instance of a {@link MongoClient}. 
*/
public MongoClient createMongoClient(final int sshLocalPort) {
    return MongoClients.create(
            buildMongoClientSettings(sshLocalPort),
            getMongoDriverInformation());
}

/**
 * Builds the MongoClientSettings from properties.
 *
 * @return a {@link MongoClientSettings} object.
 */
public MongoClientSettings buildMongoClientSettings() {
    // null resolves to the ServerMonitorListener overload (no listener).
    return buildMongoClientSettings(null);
}

/**
 * Builds the MongoClientSettings from properties.
 *
 * @param sshLocalPort the local port number for an internal SSH tunnel. A port number of zero
 *                     indicates there is no valid internal SSH tunnel started.
 * @return a {@link MongoClientSettings} object.
 */
public MongoClientSettings buildMongoClientSettings(final int sshLocalPort) {
    return buildMongoClientSettings(null, sshLocalPort);
}

/**
 * Builds the MongoClientSettings from properties.
 *
 * @param serverMonitorListener the server monitor listener
 * @return a {@link MongoClientSettings} object.
 */
public MongoClientSettings buildMongoClientSettings(
        final ServerMonitorListener serverMonitorListener) {
    return buildMongoClientSettings(serverMonitorListener, 0);
}

/**
 * Builds the MongoClientSettings from properties.
 *
 * @param serverMonitorListener the server monitor listener
 * @param sshLocalPort the local port number for an internal SSH tunnel. A port number of zero
 *                     indicates there is no valid internal SSH tunnel started.
 * @return a {@link MongoClientSettings} object.
 */
public MongoClientSettings buildMongoClientSettings(
        final ServerMonitorListener serverMonitorListener,
        final int sshLocalPort) {

    final MongoClientSettings.Builder clientSettingsBuilder = MongoClientSettings.builder();

    // Create credential for authentication database.
    final String user = getUser();
    final String password = getPassword();
    if (user != null && password != null) {
        final MongoCredential credential =
                MongoCredential.createCredential(
                        user, getDefaultAuthenticationDatabase(), password.toCharArray());
        clientSettingsBuilder.credential(credential);
    }

    // Set the server configuration.
    applyServerSettings(clientSettingsBuilder, serverMonitorListener);

    // Set the cluster configuration.
    applyClusterSettings(clientSettingsBuilder, sshLocalPort);

    // Set the socket configuration.
    applySocketSettings(clientSettingsBuilder);

    // Set the SSL/TLS configuration.
    applyTlsSettings(clientSettingsBuilder);

    // Set the read preference.
    final DocumentDbReadPreference readPreference = getReadPreference();
    if (readPreference != null) {
        clientSettingsBuilder.readPreference(ReadPreference.valueOf(
                readPreference.getName()));
    }

    // Get retry reads.
    final boolean retryReads = getRetryReadsEnabled();
    clientSettingsBuilder
            .applicationName(getApplicationName())
            .retryReads(retryReads)
            // NOTE: DocumentDB does not support retryWrites option. (2020-05-13)
            // https://docs.aws.amazon.com/documentdb/latest/developerguide/functional-differences.html#functional-differences.retryable-writes
            .retryWrites(false);
    // Fix: the original chained an extra .build() above whose result was discarded,
    // constructing a throw-away MongoClientSettings instance. Build exactly once here.
    return clientSettingsBuilder.build();
}

/**
 * Builds the sanitized connection string from properties. The password and other
 * sensitive values are omitted.
 *
 * @return a {@link String} with the sanitized connection properties.
 */
public @NonNull String buildSanitizedConnectionString() {
    // Password deliberately passed as null so it never appears in the output.
    final String loginInfo = buildLoginInfo(getUser(), null);
    final String hostInfo = buildHostInfo(getHostname());
    final String databaseInfo = buildDatabaseInfo(getDatabase());
    final StringBuilder optionalInfo = new StringBuilder();
    buildSanitizedOptionalInfo(optionalInfo, this);
    return buildConnectionString(loginInfo, hostInfo, databaseInfo, optionalInfo.toString());
}

/**
 * Builds the database segment of a connection string.
 *
 * @param database the database name; may be null or blank.
 * @return the URL-encoded database name, or an empty string when absent.
 */
@NonNull
static String buildDatabaseInfo(final @Nullable String database) {
    return isNullOrWhitespace(database)
            ? ""
            : encodeValue(database);
}

/**
 * Builds the host segment of a connection string.
 *
 * @param hostname the host name; may be null or blank.
 * @return the host name, or an empty string when absent. Not URL-encoded.
 */
@NonNull
static String buildHostInfo(final @Nullable String hostname) {
    return isNullOrWhitespace(hostname)
            ? ""
            : hostname;
}

/**
 * Builds the login segment ({@code user[:password]@}) of a connection string.
 *
 * @param user the user name; may be null or blank.
 * @param password the password; may be null or blank.
 * @return the URL-encoded login segment including the trailing '@', or an empty string
 *         when both user and password are absent.
 */
@NonNull
static String buildLoginInfo(final @Nullable String user, final @Nullable String password) {
    final String userString = isNullOrWhitespace(user) ? "" : encodeValue(user);
    final String passwordString = isNullOrWhitespace(password) ? "" : ":" + encodeValue(password);
    // The '@' separator is only appended when at least one of user/password is present.
    final String userInfo = isNullOrWhitespace(userString) && isNullOrWhitespace(passwordString)
            ? ""
            : "@";
    return userString + passwordString + userInfo;
}

/**
 * Assembles the full connection string from its pre-built segments.
 *
 * @param loginInfo the login segment.
 * @param hostInfo the host segment.
 * @param databaseInfo the database segment.
 * @param optionalInfo the optional query-string segment.
 * @return the assembled connection string.
 */
static @NonNull String buildConnectionString(
        final String loginInfo,
        final String hostInfo,
        final String databaseInfo,
        final String optionalInfo) {
    return String.format(CONNECTION_STRING_TEMPLATE, loginInfo, hostInfo, databaseInfo, optionalInfo);
}

/**
 * Builds the sanitized optional info connection string. It does not include
 * sensitive options like SSH_PRIVATE_KEY_PASSPHRASE.
 *
 * @param optionalInfo the connection string to build.
 * @param properties the properties to read the option values from.
*/
static void buildSanitizedOptionalInfo(
        final StringBuilder optionalInfo,
        final DocumentDbConnectionProperties properties) {
    // Append each option only when it differs from its default; the emitted order below
    // is the order options appear in the sanitized connection string.
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.APPLICATION_NAME,
            properties.getApplicationName());
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.LOGIN_TIMEOUT_SEC,
            properties.getLoginTimeout());
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.METADATA_SCAN_LIMIT,
            properties.getMetadataScanLimit());
    maybeAppendOptionalValue(optionalInfo, properties.getMetadataScanMethod());
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.RETRY_READS_ENABLED,
            properties.getRetryReadsEnabled());
    maybeAppendOptionalValue(optionalInfo, properties.getReadPreference());
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.REPLICA_SET,
            properties.getReplicaSet(), null);
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.TLS_ENABLED,
            properties.getTlsEnabled());
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.TLS_ALLOW_INVALID_HOSTNAMES,
            properties.getTlsAllowInvalidHostnames());
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.TLS_CA_FILE,
            properties.getTlsCAFilePath(), null);
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.SCHEMA_NAME,
            properties.getSchemaName());
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.SSH_USER,
            properties.getSshUser(), null);
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.SSH_HOSTNAME,
            properties.getSshHostname(), null);
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.SSH_PRIVATE_KEY_FILE,
            properties.getSshPrivateKeyFile(), null);
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.SSH_STRICT_HOST_KEY_CHECKING,
            properties.getSshStrictHostKeyChecking());
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.SSH_KNOWN_HOSTS_FILE,
            properties.getSshKnownHostsFile(), null);
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.DEFAULT_FETCH_SIZE,
            properties.getDefaultFetchSize());
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.REFRESH_SCHEMA,
            properties.getRefreshSchema());
    maybeAppendOptionalValue(optionalInfo, DocumentDbConnectionProperty.DEFAULT_AUTH_DB,
            properties.getDefaultAuthenticationDatabase());
    maybeAppendOptionalValue(optionalInfo, properties.getAllowDiskUseOption());
}

/**
 * Appends the option when the value differs from the property's declared default.
 * NOTE(review): assumes {@code property.getDefaultValue()} is non-null for the
 * properties passed to this overload — null defaults use the 4-argument overload.
 */
static void maybeAppendOptionalValue(final StringBuilder optionalInfo,
        final DocumentDbConnectionProperty property, final String value) {
    if (!property.getDefaultValue().equals(value)) {
        appendOption(optionalInfo, property, value);
    }
}

/**
 * Appends the option when the value differs from the given (possibly null) default.
 */
static void maybeAppendOptionalValue(final StringBuilder optionalInfo,
        final DocumentDbConnectionProperty property, final String value, final String defaultValue) {
    // Objects.equals is null-safe for both sides.
    if (!Objects.equals(defaultValue, value)) {
        appendOption(optionalInfo, property, value);
    }
}

/**
 * Appends the option when the integer value differs from the property's declared default.
 */
static void maybeAppendOptionalValue(final StringBuilder optionalInfo,
        final DocumentDbConnectionProperty property, final int value) {
    if (value != Integer.parseInt(property.getDefaultValue())) {
        appendOption(optionalInfo, property, value);
    }
}

/**
 * Appends the option when the boolean value differs from the property's declared default.
 */
static void maybeAppendOptionalValue(final StringBuilder optionalInfo,
        final DocumentDbConnectionProperty property, final boolean value) {
    if (value != Boolean.parseBoolean(property.getDefaultValue())) {
        appendOption(optionalInfo, property, value);
    }
}

/**
 * Appends the metadata scan method option when it differs from the default.
 */
static void maybeAppendOptionalValue(final StringBuilder optionalInfo,
        final DocumentDbMetadataScanMethod value) {
    if (value != DocumentDbMetadataScanMethod.fromString(
            DocumentDbConnectionProperty.METADATA_SCAN_METHOD.getDefaultValue())) {
        appendOption(optionalInfo, DocumentDbConnectionProperty.METADATA_SCAN_METHOD,
                value.getName());
    }
}

/**
 * Appends the read preference option when set (there is no non-null default).
 */
static void maybeAppendOptionalValue(final StringBuilder optionalInfo,
        final DocumentDbReadPreference value) {
    if (value != null) {
        appendOption(optionalInfo, DocumentDbConnectionProperty.READ_PREFERENCE,
                value.getName());
    }
}

/**
 * Appends the allow disk use option when it differs from the default.
 */
static void maybeAppendOptionalValue(final StringBuilder optionalInfo,
        final DocumentDbAllowDiskUseOption value) {
    if (value != DocumentDbAllowDiskUseOption.fromString(
            DocumentDbConnectionProperty.ALLOW_DISK_USE.getDefaultValue())) {
        appendOption(optionalInfo, DocumentDbConnectionProperty.ALLOW_DISK_USE,
                value.getName());
    }
}

/**
 * Builds the connection string for SSH properties.
 *
 * @return a connection string with SSH properties.
 */
public String buildSshConnectionString() {
    // Only host plus SSH-related options are relevant; login and database are omitted.
    final String loginInfo = "";
    final String hostInfo = buildHostInfo(getHostname());
    final String databaseInfo = "";
    final StringBuilder optionalInfo = new StringBuilder();
    buildSshOptionalInfo(optionalInfo);
    return buildConnectionString(loginInfo, hostInfo, databaseInfo, optionalInfo.toString());
}

// Appends every SSH option that is set and (for options with defaults) non-default.
// Unlike the sanitized variant, this includes SSH_PRIVATE_KEY_PASSPHRASE.
private void buildSshOptionalInfo(final StringBuilder optionalInfo) {
    if (getSshUser() != null) {
        appendOption(optionalInfo, DocumentDbConnectionProperty.SSH_USER, getSshUser());
    }
    if (getSshHostname() != null) {
        appendOption(optionalInfo, DocumentDbConnectionProperty.SSH_HOSTNAME, getSshHostname());
    }
    if (getSshPrivateKeyFile() != null) {
        appendOption(optionalInfo, DocumentDbConnectionProperty.SSH_PRIVATE_KEY_FILE,
                getSshPrivateKeyFile());
    }
    if (getSshPrivateKeyPassphrase() != null
            && !DocumentDbConnectionProperty.SSH_PRIVATE_KEY_PASSPHRASE.getDefaultValue().equals(getSshPrivateKeyPassphrase())) {
        appendOption(optionalInfo, DocumentDbConnectionProperty.SSH_PRIVATE_KEY_PASSPHRASE,
                getSshPrivateKeyPassphrase());
    }
    if (getSshStrictHostKeyChecking() != Boolean.parseBoolean(DocumentDbConnectionProperty.SSH_STRICT_HOST_KEY_CHECKING.getDefaultValue())) {
        appendOption(optionalInfo, DocumentDbConnectionProperty.SSH_STRICT_HOST_KEY_CHECKING,
                getSshStrictHostKeyChecking());
    }
    if (getSshKnownHostsFile() != null
            && !DocumentDbConnectionProperty.SSH_KNOWN_HOSTS_FILE.getDefaultValue().equals(getSshKnownHostsFile())) {
        appendOption(optionalInfo,
                DocumentDbConnectionProperty.SSH_KNOWN_HOSTS_FILE, getSshKnownHostsFile());
    }
}

/**
 * Appends an option and value to the string.
 *
 * @param optionInfo the connection string to build.
 * @param option the option to add.
 * @param optionValue the option value to set.
 */
public static void appendOption(final StringBuilder optionInfo,
        final DocumentDbConnectionProperty option, final Object optionValue) {
    // First option is prefixed with '?', subsequent ones with '&'.
    optionInfo.append(optionInfo.length() == 0 ? "?" : "&");
    optionInfo.append(option.getName())
            .append("=")
            .append(optionValue == null ? "" : encodeValue(optionValue.toString()));
}

/**
 * Encodes a value into URL encoded value.
 *
 * @param value the value to encode.
 * @return the encoded value.
 */
public static String encodeValue(final String value) {
    try {
        return URLEncoder.encode(value, StandardCharsets.UTF_8.toString());
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is guaranteed by the JVM, so this branch is effectively unreachable;
        // fall back to the raw value rather than fail.
        return value;
    }
}

/**
 * Validates the existing properties using CLIENT validation.
 * @throws SQLException if the required properties are not correctly set.
 */
public void validateRequiredProperties() throws SQLException {
    validateRequiredProperties(ValidationType.CLIENT);
}

/**
 * Validates the existing properties.
 * @param validationType Which validation type to perform.
 * @throws SQLException if the required properties are not correctly set.
*/
public void validateRequiredProperties(final ValidationType validationType) throws SQLException {
    final boolean forClient = validationType == ValidationType.CLIENT;
    final boolean forSshTunnel = validationType == ValidationType.SSH_TUNNEL;
    // Client connections must supply credentials and a database.
    if (forClient && (isNullOrWhitespace(getUser()) || isNullOrWhitespace(getPassword()))) {
        throw SqlError.createSQLException(
                LOGGER,
                SqlState.INVALID_PARAMETER_VALUE,
                SqlError.MISSING_USER_PASSWORD
        );
    }
    if (forClient && isNullOrWhitespace(getDatabase())) {
        throw SqlError.createSQLException(
                LOGGER,
                SqlState.INVALID_PARAMETER_VALUE,
                SqlError.MISSING_DATABASE
        );
    }
    // A hostname is required for both client and SSH-tunnel validation.
    if ((forClient || forSshTunnel) && isNullOrWhitespace(getHostname())) {
        throw SqlError.createSQLException(
                LOGGER,
                SqlState.INVALID_PARAMETER_VALUE,
                SqlError.MISSING_HOSTNAME
        );
    }
    // SSH-tunnel validation additionally requires the SSH user, host and private key file.
    if (forSshTunnel && isNullOrWhitespace(getSshUser())) {
        throw SqlError.createSQLException(
                LOGGER,
                SqlState.INVALID_PARAMETER_VALUE,
                SqlError.MISSING_SSH_USER
        );
    }
    if (forSshTunnel && isNullOrWhitespace(getSshHostname())) {
        throw SqlError.createSQLException(
                LOGGER,
                SqlState.INVALID_PARAMETER_VALUE,
                SqlError.MISSING_SSH_HOSTNAME
        );
    }
    if (forSshTunnel && isNullOrWhitespace(getSshPrivateKeyFile())) {
        throw SqlError.createSQLException(
                LOGGER,
                SqlState.INVALID_PARAMETER_VALUE,
                SqlError.MISSING_SSH_PRIVATE_KEY_FILE
        );
    }
}

/**
 * Gets the connection properties from the connection string using CLIENT validation.
 *
 * @param documentDbUrl the connection string.
 * @return a {@link DocumentDbConnectionProperties} with the properties set.
 * @throws SQLException if connection string is invalid.
 */
public static DocumentDbConnectionProperties getPropertiesFromConnectionString(final String documentDbUrl)
        throws SQLException {
    // Delegate with empty seed properties and the default scheme.
    return getPropertiesFromConnectionString(new Properties(), documentDbUrl, DOCUMENT_DB_SCHEME);
}

/**
 * Gets the connection properties from the connection string.
 *
 * @param documentDbUrl the connection string.
 * @param validationType Which properties to validate.
 * @return a {@link DocumentDbConnectionProperties} with the properties set.
 * @throws SQLException if connection string is invalid.
 */
public static DocumentDbConnectionProperties getPropertiesFromConnectionString(
        final String documentDbUrl,
        final ValidationType validationType) throws SQLException {
    return getPropertiesFromConnectionString(
            new Properties(), documentDbUrl, DOCUMENT_DB_SCHEME, validationType);
}

/**
 * Gets the connection properties from the connection string using CLIENT validation.
 *
 * @param info the given properties.
 * @param documentDbUrl the connection string.
 * @param connectionStringPrefix the connection string prefix.
 * @return a {@link DocumentDbConnectionProperties} with the properties set.
 * @throws SQLException if connection string is invalid.
 */
public static DocumentDbConnectionProperties getPropertiesFromConnectionString(
        final Properties info, final String documentDbUrl, final String connectionStringPrefix)
        throws SQLException {
    return getPropertiesFromConnectionString(
            info, documentDbUrl, connectionStringPrefix, ValidationType.CLIENT);
}

/**
 * Gets the connection properties from the connection string.
 *
 * @param info the given properties.
 * @param documentDbUrl the connection string.
 * @param connectionStringPrefix the connection string prefix.
 * @param validationType Which validation to perform.
 * @return a {@link DocumentDbConnectionProperties} with the properties set.
 * @throws SQLException if connection string is invalid.
*/ public static DocumentDbConnectionProperties getPropertiesFromConnectionString( final Properties info, final String documentDbUrl, final String connectionStringPrefix, final ValidationType validationType) throws SQLException { final DocumentDbConnectionProperties properties = new DocumentDbConnectionProperties(info); final String postSchemeSuffix = documentDbUrl.substring(connectionStringPrefix.length()); if (!isNullOrWhitespace(postSchemeSuffix)) { try { final URI uri = new URI(postSchemeSuffix); setHostName(properties, uri, validationType); setUserPassword(properties, uri, validationType); setDatabase(properties, uri, validationType); setOptionalProperties(properties, uri); setCustomOptions(properties); } catch (URISyntaxException e) { throw SqlError.createSQLException( LOGGER, SqlState.CONNECTION_FAILURE, e, SqlError.INVALID_CONNECTION_PROPERTIES, documentDbUrl + " : '" + e.getMessage() + "'" ); } catch (UnsupportedEncodingException e) { throw new SQLException(e.getMessage(), e); } } properties.validateRequiredProperties(validationType); return properties; } static void setCustomOptions(final DocumentDbConnectionProperties properties) { final String customOptions = System.getenv(DOCUMENTDB_CUSTOM_OPTIONS); if (customOptions == null) { return; } final String[] propertyPairs = customOptions.split(";"); for (String pair : propertyPairs) { final int splitIndex = pair.indexOf("="); final String key = pair.substring(0, splitIndex); final String value = pair.substring(1 + splitIndex); addPropertyIfValid(properties, key, value, true, true); } } private static void setDatabase( final Properties properties, final URI mongoUri, final ValidationType validationType) throws SQLException { if (isNullOrWhitespace(mongoUri.getPath())) { if (properties.getProperty(DocumentDbConnectionProperty.DATABASE.getName(), null) == null && validationType == ValidationType.CLIENT) { throw SqlError.createSQLException( LOGGER, SqlState.CONNECTION_FAILURE, SqlError.MISSING_DATABASE); } 
return; } final String database = mongoUri.getPath().substring(1); addPropertyIfNotSet(properties, DocumentDbConnectionProperty.DATABASE.getName(), database); } private static void setOptionalProperties(final Properties properties, final URI mongoUri) throws UnsupportedEncodingException { final String query = mongoUri.getQuery(); if (isNullOrWhitespace(query)) { return; } final String[] propertyPairs = query.split("&"); for (String pair : propertyPairs) { final int splitIndex = pair.indexOf("="); final String key = pair.substring(0, splitIndex); final String value = pair.substring(1 + splitIndex); addPropertyIfValid(properties, key, value, false, false); } } private static void setUserPassword( final Properties properties, final URI mongoUri, final ValidationType validationType) throws UnsupportedEncodingException, SQLException { if (mongoUri.getUserInfo() == null) { if ((properties.getProperty(DocumentDbConnectionProperty.USER.getName(), null) == null && validationType == ValidationType.CLIENT) || (properties.getProperty(DocumentDbConnectionProperty.PASSWORD.getName(), null) == null && validationType == ValidationType.CLIENT)) { throw SqlError.createSQLException( LOGGER, SqlState.CONNECTION_FAILURE, SqlError.MISSING_USER_PASSWORD); } return; } final String userPassword = mongoUri.getUserInfo(); // Password is optional final int userPasswordSeparatorIndex = userPassword.indexOf(":"); if (userPasswordSeparatorIndex >= 0) { addPropertyIfNotSet(properties, DocumentDbConnectionProperty.USER.getName(), userPassword.substring(0, userPasswordSeparatorIndex)); addPropertyIfNotSet(properties, DocumentDbConnectionProperty.PASSWORD.getName(), userPassword.substring(userPasswordSeparatorIndex + 1)); } else { addPropertyIfNotSet(properties, DocumentDbConnectionProperty.USER.getName(), userPassword); } } private static void setHostName( final Properties properties, final URI uri, final ValidationType validationType) throws SQLException { String hostName = uri.getHost(); if 
(hostName == null) { if (properties.getProperty(DocumentDbConnectionProperty.HOSTNAME.getName(), null) == null && (validationType == ValidationType.CLIENT || validationType == ValidationType.SSH_TUNNEL)) { throw SqlError.createSQLException( LOGGER, SqlState.CONNECTION_FAILURE, SqlError.MISSING_HOSTNAME); } return; } if (uri.getPort() > 0) { hostName += ":" + uri.getPort(); } addPropertyIfNotSet(properties, DocumentDbConnectionProperty.HOSTNAME.getName(), hostName); } private static void addPropertyIfValid( final Properties info, final String propertyKey, final String propertyValue, final boolean allowUnsupported, final boolean allowUnknown) { if (DocumentDbConnectionProperty.isSupportedProperty(propertyKey)) { addPropertyIfNotSet(info, propertyKey, propertyValue); } else if (DocumentDbConnectionProperty.isUnsupportedMongoDBProperty(propertyKey)) { if (allowUnsupported) { LOGGER.warn( "Adding unsupported MongoDB property: {{}} it may not supported by the driver or server.", propertyKey); addPropertyIfNotSet(info, propertyKey, propertyValue); } else { LOGGER.warn( "Ignored MongoDB property: {{}} as it not supported by the driver.", propertyKey); } } else { if (allowUnknown) { LOGGER.warn( "Adding unknown MongoDB property: {{}} it may not supported by the driver or server.", propertyKey); addPropertyIfNotSet(info, propertyKey, propertyValue); } else { LOGGER.warn("Ignored invalid property: {{}}", propertyKey); } } } private static void addPropertyIfNotSet( @NonNull final Properties info, @NonNull final String key, @Nullable final String value) { if (!isNullOrWhitespace(value)) { info.putIfAbsent(key, value); } } /** * Applies the server-related connection properties to the given client settings builder. * * @param clientSettingsBuilder The client settings builder to apply the properties to. * @param serverMonitorListener The server monitor listener to add as an event listener. 
*/ private void applyServerSettings( final MongoClientSettings.Builder clientSettingsBuilder, final ServerMonitorListener serverMonitorListener) { clientSettingsBuilder.applyToServerSettings( b -> { if (serverMonitorListener != null) { b.addServerMonitorListener(serverMonitorListener); } }); } /** * Applies the cluster-related connection properties to the given client settings builder. * @param clientSettingsBuilder The client settings builder to apply the properties to. */ private void applyClusterSettings( final MongoClientSettings.Builder clientSettingsBuilder, final int sshLocalPort) { final String host; if (enableSshTunnel() && isSshPrivateKeyFileExists() && sshLocalPort > 0) { host = String.format("localhost:%d", sshLocalPort); } else { host = getHostname(); } final String replicaSetName = getReplicaSet(); clientSettingsBuilder.applyToClusterSettings( b -> { if (host != null) { b.hosts(Collections.singletonList(new ServerAddress(host))); } if (replicaSetName != null) { b.requiredReplicaSetName(replicaSetName); } }); } /** * Gets indicator of whether the options indicate the SSH port forwarding tunnel * should be enabled. * * @return {@code true} if the SSH port forwarding tunnel should be enabled, * otherwise {@code false}. */ public boolean enableSshTunnel() { return !isNullOrWhitespace(getSshUser()) && !isNullOrWhitespace(getSshHostname()) && !isNullOrWhitespace(getSshPrivateKeyFile()); } /** * Get whether the SSH private key file exists. * * @return returns {@code true} if the file exists, {@code false}, otherwise. */ public boolean isSshPrivateKeyFileExists() { return Files.exists(getPath(getSshPrivateKeyFile(), getDocumentDbSearchPaths())); } /** * Applies the socket-related connection properties to the given client settings builder. * @param clientSettingsBuilder The client settings builder to apply the properties to. 
*/
private void applySocketSettings(
        final MongoClientSettings.Builder clientSettingsBuilder) {
    final Integer connectTimeout = getLoginTimeout();
    clientSettingsBuilder.applyToSocketSettings(
            b -> {
                if (connectTimeout != null) {
                    // Login timeout is interpreted in seconds.
                    b.connectTimeout(connectTimeout, TimeUnit.SECONDS);
                }
            });
}

/**
 * Applies the TLS/SSL-related connection properties to the given client settings builder.
 * @param clientSettingsBuilder The client settings builder to apply the properties to.
 */
private void applyTlsSettings(final MongoClientSettings.Builder clientSettingsBuilder) {
    clientSettingsBuilder.applyToSslSettings(this::applyToSslSettings);
}

// @SneakyThrows lets the checked IOException/SQLException from
// applyCertificateAuthorities escape the SslSettings callback unchecked.
@SneakyThrows
private void applyToSslSettings(final SslSettings.Builder builder) {
    // Handle tls and tlsAllowInvalidHostnames options.
    final boolean tlsEnabled = getTlsEnabled();
    final boolean tlsAllowInvalidHostnames = getTlsAllowInvalidHostnames();
    builder
            .enabled(tlsEnabled)
            .invalidHostNameAllowed(tlsAllowInvalidHostnames);
    if (!tlsEnabled) {
        // No trust material needed when TLS is off.
        return;
    }
    applyCertificateAuthorities(builder);
}

// Builds an SSLContext trusting the embedded Amazon root CAs, the optional tlsCAFile,
// and the system/JDK trust stores, then applies it to the SSL settings builder.
private void applyCertificateAuthorities(final SslSettings.Builder builder)
        throws IOException, SQLException {
    final List<Certificate> caCertificates = new ArrayList<>();
    // Append embedded CA root certificate(s), and optionally including the tlsCAFile option, if provided.
    appendEmbeddedAndOptionalCaCertificates(caCertificates);
    // Add the system and JDK trusted certificates.
    caCertificates.addAll(CertificateUtils.getSystemTrustedCertificates());
    caCertificates.addAll(CertificateUtils.getJdkTrustedCertificates());
    // Create the SSL context and apply to the builder.
    final SSLContext sslContext = SSLFactory.builder()
            .withTrustMaterial(caCertificates)
            .build()
            .getSslContext();
    builder.context(sslContext);
}

// Loads the user-specified CA file (if any) and the embedded 2019/2021 root PEMs into
// the given list. Throws when a configured tlsCAFile cannot be found.
@VisibleForTesting
void appendEmbeddedAndOptionalCaCertificates(final List<Certificate> caCertificates)
        throws IOException, SQLException {
    // If provided, add user-specified CA root certificate file.
    if (!Strings.isNullOrEmpty(getTlsCAFilePath())) {
        final String tlsCAFileName = getTlsCAFilePath();
        final Path tlsCAFileNamePath;
        // Allow certificate file to be found under one the trusted DocumentDB folders
        tlsCAFileNamePath = getPath(tlsCAFileName, getDocumentDbSearchPaths());
        if (tlsCAFileNamePath.toFile().exists()) {
            try (InputStream inputStream = Files.newInputStream(tlsCAFileNamePath)) {
                caCertificates.addAll(CertificateUtils.loadCertificate(inputStream));
            }
        } else {
            throw SqlError.createSQLException(
                    LOGGER,
                    SqlState.INVALID_PARAMETER_VALUE,
                    SqlError.TLS_CA_FILE_NOT_FOUND,
                    tlsCAFileNamePath);
        }
    }
    // Load embedded CA root certificates.
    // NOTE(review): assumes both PEM resources are packaged with the driver; a missing
    // resource would make getResourceAsStream return null — confirm packaging.
    try (InputStream pem2019ResourceAsStream =
                 getClass().getResourceAsStream(ROOT_2019_PEM_RESOURCE_FILE_NAME);
         InputStream pem2021ResourceAsStream =
                 getClass().getResourceAsStream(ROOT_2021_PEM_RESOURCE_FILE_NAME)) {
        caCertificates.addAll(CertificateUtils.loadCertificate(pem2019ResourceAsStream));
        caCertificates.addAll(CertificateUtils.loadCertificate(pem2021ResourceAsStream));
    }
}

/**
 * Gets an absolute path from the given file path. It performs the substitution for a leading
 * '~' to be replaced by the user's home directory.
 *
 * @param filePathString the given file path to process.
 * @param searchFolders list of folders to search for a relative path in, in order.
 * @return a {@link Path} for the absolute path for the given file path.
 */
public static Path getPath(final String filePathString, final String... searchFolders) {
    final Path filePath = Paths.get(filePathString);
    if (filePathString.matches(HOME_PATH_PREFIX_REG_EXPR)) {
        // Expand a leading '~' to the user's home directory.
        final String fromHomePath = filePathString.replaceFirst("~",
                Matcher.quoteReplacement(USER_HOME_PATH_NAME));
        return Paths.get(fromHomePath).toAbsolutePath();
    } else {
        if (filePath.isAbsolute()) {
            return filePath;
        }
        // Relative path: return the first existing match under the search folders.
        for (String searchFolder : searchFolders) {
            if (searchFolder == null) {
                continue;
            }
            final Path testPath = Paths.get(searchFolder, filePathString);
            if (testPath.toAbsolutePath().toFile().exists()) {
                return testPath;
            }
        }
    }
    // Fall back to resolving against the current working directory.
    return filePath.toAbsolutePath();
}

/**
 * Attempts to retrieve a property as a ReadPreference.
 *
 * @param key The property to retrieve.
 * @return The retrieved property as a ReadPreference or null if it did not exist or was not a
 *         valid ReadPreference.
 */
private DocumentDbReadPreference getPropertyAsReadPreference(@NonNull final String key) {
    DocumentDbReadPreference property = null;
    try {
        if (getProperty(key) != null) {
            property = DocumentDbReadPreference.fromString(getProperty(key));
        }
    } catch (IllegalArgumentException e) {
        // Invalid values are ignored (null is returned) rather than failing the connection.
        LOGGER.warn("Property {{}} was ignored as it was not a valid read preference.", key, e);
    }
    return property;
}

/**
 * Attempts to retrieve a property as a DocumentDbMetadataScanMethod.
 *
 * @param key The property to retrieve.
 * @return The retrieved property as a DocumentDbMetadataScanMethod (falling back to the
 *         property default when unset) or null if the value was not a valid scan method.
*/ private DocumentDbMetadataScanMethod getPropertyAsScanMethod(@NonNull final String key) { DocumentDbMetadataScanMethod property = null; try { if (getProperty(key) != null) { property = DocumentDbMetadataScanMethod.fromString(getProperty(key)); } else if (DocumentDbConnectionProperty.getPropertyFromKey(key) != null) { property = DocumentDbMetadataScanMethod.fromString( DocumentDbConnectionProperty.getPropertyFromKey(key).getDefaultValue()); } } catch (IllegalArgumentException e) { LOGGER.warn("Property {{}} was ignored as it was not a valid read preference.", key, e); } return property; } /** * Attempts to retrieve a property as a DocumentDbAllowDiskUseOption. * * @param key The property to retrieve. * @return The retrieved property as a DocumentDbAllowDiskUseOption or null if it did not exist or was not a * valid DocumentDbAllowDiskUseOption. */ private DocumentDbAllowDiskUseOption getPropertyAsAllowDiskUseOption(@NonNull final String key) { DocumentDbAllowDiskUseOption property = null; try { if (getProperty(key) != null) { property = DocumentDbAllowDiskUseOption.fromString(getProperty(key)); } else if (DocumentDbConnectionProperty.getPropertyFromKey(key) != null) { property = DocumentDbAllowDiskUseOption.fromString( DocumentDbConnectionProperty.getPropertyFromKey(key).getDefaultValue()); } } catch (IllegalArgumentException e) { LOGGER.warn("Property {{}} was ignored as it was not a valid allow disk use option.", key, e); } return property; } /** * Attempts to retrieve a property as a Long. * * @param key The property to retrieve. * @return The retrieved property as a Long or null if it did not exist or could not be parsed. 
*/
private Long getPropertyAsLong(@NonNull final String key) {
    Long property = null;
    try {
        if (getProperty(key) != null) {
            property = Long.parseLong(getProperty(key));
        }
        // NOTE(review): unlike getPropertyAsInteger below, this does not fall back to the
        // property's declared default when unset — confirm whether that asymmetry is intended.
    } catch (NumberFormatException e) {
        LOGGER.warn("Property {{}} was ignored as it was not of type long.", key, e);
    }
    return property;
}

/**
 * Attempts to retrieve a property as an Integer.
 *
 * @param key The property to retrieve.
 * @return The retrieved property as an Integer (falling back to the property's declared
 *         default when unset) or null if it did not exist or could not be parsed.
 */
private Integer getPropertyAsInteger(@NonNull final String key) {
    Integer property = null;
    try {
        if (getProperty(key) != null) {
            property = Integer.parseInt(getProperty(key));
        } else if (DocumentDbConnectionProperty.getPropertyFromKey(key) != null) {
            // Unset: parse the declared default instead.
            property = Integer.parseInt(
                    DocumentDbConnectionProperty.getPropertyFromKey(key).getDefaultValue());
        }
    } catch (NumberFormatException e) {
        LOGGER.warn("Property {{}} was ignored as it was not of type integer.", key, e);
    }
    return property;
}

/**
 * Checks whether the value is null or blank.
 * @param value the value to test.
 * @return returns {@code true} if the value is null or matches WHITE_SPACE_PATTERN as a
 *         whole string (matches(), not find() — presumably empty or all-whitespace input;
 *         confirm against the pattern's definition), or {@code false}, otherwise.
 */
public static boolean isNullOrWhitespace(@Nullable final String value) {
    return value == null || WHITE_SPACE_PATTERN.matcher(value).matches();
}
}
4,574
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbAbstractResultSet.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.google.common.collect.ImmutableList; import org.apache.commons.beanutils.ConversionException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.documentdb.jdbc.common.utilities.JdbcColumnMetaData; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import software.amazon.documentdb.jdbc.common.utilities.SqlState; import software.amazon.documentdb.jdbc.common.utilities.TypeConverters; import javax.sql.rowset.serial.SerialBlob; import javax.sql.rowset.serial.SerialClob; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.Reader; import java.io.StringReader; import java.math.BigDecimal; import java.nio.charset.StandardCharsets; import java.sql.Blob; import java.sql.Clob; import java.sql.Date; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; import java.util.Calendar; import java.util.HashMap; import java.util.Map; import java.util.TreeMap; /** * Provides value processing. 
*/
public abstract class DocumentDbAbstractResultSet extends software.amazon.documentdb.jdbc.common.ResultSet {
    private static final Logger LOGGER = LoggerFactory
            .getLogger(DocumentDbAbstractResultSet.class);
    // Column metadata for the result set, in ordinal order.
    private final ImmutableList<JdbcColumnMetaData> columnMetaData;
    // Maps column label to one-based column index; case sensitivity depends on 'caseSensitive'.
    private final Map<String, Integer> columnToIndexMap;
    private final int columnCount;
    // Whether the most recently read value was SQL NULL (reported via wasNull()).
    private boolean wasNull = false;
    // NOTE(review): declared but never read in this excerpt — presumably initialized
    // lazily elsewhere in the class; confirm before removing.
    private ResultSetMetaData resultSetMetaData = null;
    // Whether column-label lookups are case sensitive.
    private final boolean caseSensitive;

    /**
     * Instantiates the {@link DocumentDbAbstractResultSet} class. This will treat
     * column labels as case-insensitive.
     *
     * @param statement the statement that generated this result set.
     * @param columnMetaData the column metadata of the result set.
     */
    DocumentDbAbstractResultSet(
            final Statement statement,
            final ImmutableList<JdbcColumnMetaData> columnMetaData) {
        this(statement, columnMetaData, false);
    }

    /**
     * Instantiates the {@link DocumentDbAbstractResultSet} class.
     *
     * @param statement the statement that generated this result set.
     * @param columnMetaData the column metadata of the result set.
     * @param caseSensitive indicator of whether the column label should be case sensitive.
     */
    DocumentDbAbstractResultSet(
            final Statement statement,
            final ImmutableList<JdbcColumnMetaData> columnMetaData,
            final boolean caseSensitive) {
        super(statement);
        this.columnMetaData = columnMetaData;
        this.columnCount = columnMetaData.size();
        this.caseSensitive = caseSensitive;
        this.columnToIndexMap = buildColumnIndices(columnMetaData);
    }

    // Builds the label-to-index map; a case-insensitive TreeMap is used when labels
    // are not case sensitive.
    private Map<String, Integer> buildColumnIndices(
            final ImmutableList<JdbcColumnMetaData> columnMetaData) {
        final Map<String, Integer> columnIndices;
        columnIndices = caseSensitive
                ? new HashMap<>()
                : new TreeMap<>(String.CASE_INSENSITIVE_ORDER); // Note log(N) access
        for (JdbcColumnMetaData column : columnMetaData) {
            // Convert to one-indexed.
            columnIndices.put(column.getColumnLabel(), column.getOrdinal() + 1);
        }
        return columnIndices;
    }

    /**
     * Verifies that the current row is not before the first or after the last row.
     *
     * @throws SQLException if the current row is before the first or after the last row.
     */
    protected void verifyRow() throws SQLException {
        if (isBeforeFirst()) {
            throw SqlError.createSQLException(LOGGER,
                    SqlState.DATA_EXCEPTION,
                    SqlError.BEFORE_FIRST);
        } else if (isAfterLast()) {
            throw SqlError.createSQLException(LOGGER,
                    SqlState.DATA_EXCEPTION,
                    SqlError.AFTER_LAST);
        }
    }

    /**
     * Verifies that the given (one-based) column index is with in the expected range.
     *
     * @param columnIndex the column index to verify.
     * @throws SQLException if the column index is before the first or after the last column index.
     */
    protected void verifyColumnIndex(final int columnIndex) throws SQLException {
        if (columnIndex < 1 || columnIndex > columnCount) {
            throw SqlError.createSQLException(LOGGER,
                    SqlState.DATA_EXCEPTION,
                    SqlError.INVALID_INDEX,
                    columnIndex,
                    columnCount);
        }
    }

    /**
     * Verifies that the result set is open, the row is correct and the given column index is
     * valid.
     *
     * @param columnIndex the column index to verify.
     * @throws SQLException the result set is closed, the row is incorrect or the given
     *         column index is invalid.
     */
    protected void verifyState(final int columnIndex) throws SQLException {
        verifyOpen();
        verifyRow();
        verifyColumnIndex(columnIndex);
    }

    /**
     * Gets the value in the target type on the current row and given index.
     *
     * @param columnIndex the index of the cell value.
     * @param targetType the intended target type.
     * @param <T> the intended target type.
     *
     * @return a value that is possibly converted to the target type.
     * @throws SQLException the result set is closed, the row is incorrect or the given
     *         column index is invalid.
*/ private <T> T getValue(final int columnIndex, final Class<T> targetType) throws SQLException { verifyState(columnIndex); final Object o = getValue(columnIndex); wasNull = (o == null); // If value is null, just use the target type as the source type. // This will ensure we get the default value. final Class<?> sourceType = wasNull ? targetType : o.getClass(); try { return TypeConverters.get(sourceType, targetType).convert(targetType, o); } catch (ConversionException e) { throw SqlError.createSQLException(LOGGER, SqlState.DATA_EXCEPTION, e, SqlError.UNSUPPORTED_CONVERSION, sourceType.getSimpleName(), targetType.getSimpleName()); } } /** * Gets the value of the cell are the current row and the given column index. * * @param columnIndex the (one-based) column index in the current row. * * @return the cell value. * @throws SQLException throws a SQLException */ protected abstract Object getValue(final int columnIndex) throws SQLException; @Override public boolean wasNull() throws SQLException { verifyOpen(); return wasNull; } @Override public String getString(final int columnIndex) throws SQLException { return getValue(columnIndex, String.class); } @Override public boolean getBoolean(final int columnIndex) throws SQLException { return getValue(columnIndex, boolean.class); } @Override public byte getByte(final int columnIndex) throws SQLException { return getValue(columnIndex, byte.class); } @Override public short getShort(final int columnIndex) throws SQLException { return getValue(columnIndex, short.class); } @Override public int getInt(final int columnIndex) throws SQLException { return getValue(columnIndex, int.class); } @Override public long getLong(final int columnIndex) throws SQLException { return getValue(columnIndex, long.class); } @Override public float getFloat(final int columnIndex) throws SQLException { return getValue(columnIndex, float.class); } @Override public double getDouble(final int columnIndex) throws SQLException { return getValue(columnIndex, 
double.class); } @Override public byte[] getBytes(final int columnIndex) throws SQLException { return getValue(columnIndex, byte[].class); } @Override public Date getDate(final int columnIndex) throws SQLException { return getDate(columnIndex, null); } @Override public Time getTime(final int columnIndex) throws SQLException { return getTime(columnIndex, null); } @Override public Timestamp getTimestamp(final int columnIndex) throws SQLException { return getTimestamp(columnIndex, null); } @Override public InputStream getAsciiStream(final int columnIndex) throws SQLException { final String value = getString(columnIndex); if (value == null) { return null; } return new ByteArrayInputStream(value.getBytes(StandardCharsets.US_ASCII)); } @Override public InputStream getBinaryStream(final int columnIndex) throws SQLException { final byte[] value = getValue(columnIndex, byte[].class); if (value == null) { return null; } return new ByteArrayInputStream(value); } @Override public ResultSetMetaData getMetaData() { if (resultSetMetaData == null) { return new DocumentDbResultSetMetaData(columnMetaData); } return resultSetMetaData; } @Override public Object getObject(final int columnIndex) throws SQLException { // Don't try to convert to Object here. Return value as is but check if null. verifyState(columnIndex); final Object o = getValue(columnIndex); wasNull = (o == null); if (!wasNull) { // Use default converter for the class. As we don't want the Bson types to leak. 
return TypeConverters.get(o.getClass(), Object.class).convert(null, o); } return null; } @Override public int findColumn(final String columnLabel) throws SQLException { final Integer columnIndex = columnToIndexMap.get(columnLabel); if (columnIndex == null) { throw SqlError.createSQLException(LOGGER, SqlState.DATA_EXCEPTION, SqlError.INVALID_COLUMN_LABEL, columnLabel); } return columnIndex; } @Override public Reader getCharacterStream(final int columnIndex) throws SQLException { final String value = getValue(columnIndex, String.class); if (value == null) { return null; } return new StringReader(value); } @Override public BigDecimal getBigDecimal(final int columnIndex) throws SQLException { return getValue(columnIndex, BigDecimal.class); } @Override public Date getDate(final int columnIndex, final Calendar cal) throws SQLException { final Date value = getValue(columnIndex, Date.class); if (value == null) { return null; } return getMaybeAdjustedTime(value, cal); } private Date getMaybeAdjustedTime(final Date utcTime, final Calendar cal) { if (cal != null) { long adjustedTime = utcTime.getTime(); adjustedTime -= cal.getTimeZone().getOffset(adjustedTime); return new Date(adjustedTime); } return utcTime; } @Override public Time getTime(final int columnIndex, final Calendar cal) throws SQLException { final Date value = getDate(columnIndex, cal); if (value == null) { return null; } return new Time(value.getTime()); } @Override public Timestamp getTimestamp(final int columnIndex, final Calendar cal) throws SQLException { final Date value = getDate(columnIndex, cal); if (value == null) { return null; } return new Timestamp(value.getTime()); } @Override public Reader getNCharacterStream(final int columnIndex) throws SQLException { return getCharacterStream(columnIndex); } @Override public <T> T getObject(final int columnIndex, final Class<T> type) throws SQLException { return getValue(columnIndex, type); } @Override public String getNString(final int columnIndex) throws 
SQLException { return getString(columnIndex); } @Override public Blob getBlob(final int columnIndex) throws SQLException { final byte[] bytes = getBytes(columnIndex); if (bytes == null) { return null; } return new SerialBlob(bytes); } @Override public Clob getClob(final int columnIndex) throws SQLException { final String value = getString(columnIndex); if (value == null) { return null; } return new SerialClob(value.toCharArray()); } }
4,575
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbMain.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.MapperFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.json.JsonMapper; import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.ser.std.StdSerializer; import com.fasterxml.jackson.databind.util.StdDateFormat; import com.fasterxml.jackson.datatype.guava.GuavaModule; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.mongodb.DuplicateKeyException; import com.mongodb.client.MongoClient; import lombok.Getter; import lombok.NonNull; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.MissingOptionException; import 
org.apache.commons.cli.Option; import org.apache.commons.cli.OptionGroup; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.text.StringEscapeUtils; import org.apache.logging.log4j.util.Strings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchema; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaColumn; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable; import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaSecurityException; import java.io.Console; import java.io.File; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.StringWriter; import java.io.Writer; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.SQLException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; import java.util.Set; import java.util.stream.Collectors; import static com.google.common.base.Strings.isNullOrEmpty; import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.USER_HOME_PROPERTY; import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_LATEST_OR_NONE; import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_NEW; import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.SQL_NAME_PROPERTY; import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable.COLLECTION_NAME_PROPERTY; import static 
software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable.COLUMNS_PROPERTY; public class DocumentDbMain { public static final String LIBRARY_NAME; public static final String ARCHIVE_VERSION; public static final Path USER_HOME_PATH = Paths.get(System.getProperty(USER_HOME_PROPERTY)); @VisibleForTesting static final Options COMPLETE_OPTIONS; @VisibleForTesting static final String DATE_FORMAT_PATTERN = "yyyy-MM-dd'T'HH:mm:ssXXX"; static final ObjectMapper JSON_OBJECT_MAPPER = JsonMapper.builder() .defaultDateFormat(new StdDateFormat().withColonInTimeZone(true)) .serializationInclusion(Include.NON_NULL) .serializationInclusion(Include.NON_EMPTY) .serializationInclusion(Include.NON_DEFAULT) .enable(SerializationFeature.INDENT_OUTPUT) .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES) .enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS) // Make the enums serialize to lower case. .addModule(buildEnumLowerCaseSerializerModule()) .addModule(new GuavaModule()) // Immutable* .build(); private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbMain.class); private static final Options HELP_VERSION_OPTIONS; private static final Option HELP_OPTION; private static final Option VERSION_OPTION; private static final OptionGroup COMMAND_OPTIONS; private static final List<Option> REQUIRED_OPTIONS; private static final List<Option> OPTIONAL_OPTIONS; // String constants private static final String LIBRARY_NAME_DEFAULT = "documentdb-jdbc"; // Option string constants private static final String DATABASE_OPTION_FLAG = "d"; private static final String DATABASE_OPTION_NAME = "database"; private static final String EXPORT_OPTION_FLAG = "e"; private static final String EXPORT_OPTION_NAME = "export"; private static final String GENERATE_NAME_OPTION_FLAG = "g"; private static final String GENERATE_NEW_OPTION_NAME = "generate-new"; private static final String HELP_OPTION_FLAG = "h"; private static final String 
HELP_OPTION_NAME = "help"; private static final String IMPORT_OPTION_FLAG = "i"; private static final String IMPORT_OPTION_NAME = "import"; private static final String LIST_OPTION_FLAG = "l"; private static final String LIST_OPTION_NAME = "list-schema"; private static final String LIST_TABLES_OPTION_FLAG = "b"; private static final String LIST_TABLES_OPTION_NAME = "list-tables"; private static final String OUTPUT_OPTION_FLAG = "o"; private static final String OUTPUT_OPTION_NAME = "output"; private static final String PASSWORD_OPTION_FLAG = "p"; private static final String PASSWORD_OPTION_NAME = "password"; private static final String REMOVE_OPTION_FLAG = "r"; private static final String REMOVE_OPTION_NAME = "remove"; private static final String SCAN_LIMIT_OPTION_FLAG = "x"; private static final String SCAN_LIMIT_OPTION_NAME = "scan-limit"; private static final String SCAN_METHOD_OPTION_FLAG = "m"; private static final String SCAN_METHOD_OPTION_NAME = "scan-method"; private static final String SCHEMA_NAME_OPTION_FLAG = "n"; private static final String SCHEMA_NAME_OPTION_NAME = "schema-name"; private static final String SERVER_OPTION_FLAG = "s"; private static final String SERVER_OPTION_NAME = "server"; private static final String TLS_ALLOW_INVALID_HOSTNAMES_OPTION_FLAG = "a"; private static final String TLS_ALLOW_INVALID_HOSTNAMES_OPTION_NAME = "tls-allow-invalid-hostnames"; private static final String TLS_OPTION_FLAG = "t"; private static final String TLS_OPTION_NAME = "tls"; private static final String USER_OPTION_FLAG = "u"; private static final String USER_OPTION_NAME = "user"; private static final String VERSION_OPTION_NAME = "version"; // Option argument string constants private static final String DATABASE_NAME_ARG_NAME = "database-name"; private static final String FILE_NAME_ARG_NAME = "file-name"; private static final String HOST_NAME_ARG_NAME = "host-name"; private static final String MAX_DOCUMENTS_ARG_NAME = "max-documents"; private static final String 
METHOD_ARG_NAME = "method"; private static final String USER_NAME_ARG_NAME = "user-name"; private static final String TABLE_NAMES_ARG_NAME = "[table-name[,...]]"; // Option description string constants private static final String GENERATE_NEW_OPTION_DESCRIPTION = "Generates a new schema for the database. " + "This will have the effect of replacing an existing schema " + "of the same name, if it exists."; private static final String REMOVE_OPTION_DESCRIPTION = "Removes the schema from storage for schema given by -m <schema-name>, " + "or for schema '_default', if not provided."; private static final String VERSION_OPTION_DESCRIPTION = "Prints the version number of the" + " command."; private static final String HELP_OPTION_DESCRIPTION = "Prints the command line syntax."; private static final String SERVER_OPTION_DESCRIPTION = "The hostname and optional port number (default: 27017) in the format " + "hostname[:port]. Required."; private static final String DATABASE_OPTION_DESCRIPTION = "The name of the database for the schema operations. Required."; private static final String USER_OPTION_DESCRIPTION = "The name of the user performing the schema operations. Required. " + "Note: the user will require readWrite role on the <database-name> where " + "the schema are stored if creating or modifying schema."; private static final String PASSWORD_OPTION_DESCRIPTION = "The password for the user performing the schema operations. Optional. " + "If this option is not provided, the end-user will be prompted to enter " + "the password directly."; private static final String SCHEMA_NAME_OPTION_DESCRIPTION = "The name of the schema. Default: _default."; private static final String SCAN_METHOD_OPTION_DESCRIPTION = "The scan method to sample documents from the collections. " + "One of: random, idForward, idReverse, or all. " + "Used in conjunction with the --generate-new command. 
" + "Default: random."; private static final String SCAN_LIMIT_OPTION_DESCRIPTION = "The maximum number of documents to sample in each collection. " + "Used in conjunction with the --generate-new command. " + "Default: 1000."; private static final String TLS_OPTION_DESCRIPTION = "The indicator of whether to use TLS encryption when connecting to DocumentDB. " + "Default: false."; private static final String TLS_ALLOW_INVALID_HOSTNAMES_OPTION_DESCRIPTION = "The indicator of whether to allow invalid hostnames when connecting to " + "DocumentDB. Default: false."; private static final String LIST_OPTION_DESCRIPTION = "Lists the schema names, version and table names available in the schema repository."; private static final String LIST_TABLES_OPTION_DESCRIPTION = "Lists the SQL table names in a schema."; private static final String EXPORT_OPTION_DESCRIPTION = "Exports the schema to for SQL tables named [<table-name>[,<table-name>[…]]]. If no" + " <table-name> are given, all table schema will be exported. By default," + " the schema is written to stdout. Use the --output option to write to a file." + " The output format is JSON."; private static final String IMPORT_OPTION_DESCRIPTION = "Imports the schema from <file-name> in your home directory. The schema will be imported using the" + " <schema-name> and a new version will be added - replacing the existing" + " schema. The expected input format is JSON."; private static final String OUTPUT_OPTION_DESCRIPTION = "Write the exported schema to <file-name> in your home directory (instead of stdout)." + " This will overwrite any existing file with the same name"; // Messages string constants public static final String DUPLICATE_COLUMN_KEY_DETECTED_FOR_TABLE_SCHEMA = "Duplicate column key '%s' detected for table schema '%s'. Original column '%s'." 
+ " Duplicate column '%s'."; private static final String NEW_SCHEMA_VERSION_GENERATED_MESSAGE = "New schema '%s', version '%s' generated."; private static final String REMOVED_SCHEMA_MESSAGE = "Removed schema '%s'."; private static MongoClient client; static { ARCHIVE_VERSION = getArchiveVersion(); LIBRARY_NAME = getLibraryName(); HELP_OPTION = buildHelpOption(); VERSION_OPTION = buildVersionOption(); COMMAND_OPTIONS = buildCommandOptions(); REQUIRED_OPTIONS = buildRequiredOptions(); OPTIONAL_OPTIONS = buildOptionalOptions(); // Add all option types. COMPLETE_OPTIONS = new Options(); COMPLETE_OPTIONS.addOptionGroup(COMMAND_OPTIONS); REQUIRED_OPTIONS.forEach(COMPLETE_OPTIONS::addOption); OPTIONAL_OPTIONS.forEach(COMPLETE_OPTIONS::addOption); // Add options to check for 'help' or 'version'. HELP_VERSION_OPTIONS = new Options() .addOption(HELP_OPTION) .addOption(VERSION_OPTION); } /** * Performs schema commands via the command line. * <pre> * -a,--tls-allow-invalid-hostnames The indicator of whether to allow invalid * hostnames when connecting to DocumentDB. * Default: false. * -b,--list-tables Lists the SQL table names in a schema. * -d,--database &#60;database-name&#62; The name of the database for the schema * operations. Required. * -e,--export &#60;[table-name[,...]]&#62; Exports the schema to for SQL tables named * [&#60;table-name&#62;[,&#60;table-name&#62;[…]]]. If no * &#60;table-name&#62; are given, all table schema will * be exported. By default, the schema is * written to stdout. Use the --output option to * write to a file. The output format is JSON. * -g,--generate-new Generates a new schema for the database. This * will have the effect of replacing an existing * schema of the same name, if it exists. * -h,--help Prints the command line syntax. * -i,--import &#60;file-name&#62; Imports the schema from &#60;file-name&#62; in your * home directory. 
The schema will be imported * using the &#60;schema-name&#62; and a new version * will be added - replacing the existing * schema. The expected input format is JSON. * -l,--list-schema Lists the schema names, version and table * names available in the schema repository. * -m,--scan-method &#60;method&#62; The scan method to sample documents from the * collections. One of: random, idForward, * idReverse, or all. Used in conjunction with * the --generate-new command. Default: random. * -n,--schema-name &#60;schema-name&#62; The name of the schema. Default: _default. * -o,--output &#60;file-name&#62; Write the exported schema to &#60;file-name&#62; in * your home directory (instead of stdout). This * will overwrite any existing file with the * same name * -p,--password &#60;password&#62; The password for the user performing the * schema operations. Optional. If this option * is not provided, the end-user will be * prompted to enter the password directly. * -r,--remove Removes the schema from storage for schema * given by -m &#60;schema-name&#62;, or for schema * '_default', if not provided. * -s,--server &#60;host-name&#62; The hostname and optional port number * (default: 27017) in the format * hostname[:port]. Required. * -t,--tls The indicator of whether to use TLS * encryption when connecting to DocumentDB. * Default: false. * -u,--user &#60;user-name&#62; The name of the user performing the schema * operations. Required. Note: the user will * require readWrite role on the &#60;database-name&#62; * where the schema are stored if creating or * modifying schema. * --version Prints the version number of the command. * -x,--scan-limit &#60;max-documents&#62; The maximum number of documents to sample in * each collection. Used in conjunction with the * --generate-new command. Default: 1000., * </pre> * @param args the command line arguments. 
*/ public static void main(final String[] args) { try { final StringBuilder output = new StringBuilder(); handleCommandLine(args, output); if (output.length() > 0) { LOGGER.error("{}", output); } } catch (SQLException e) { LOGGER.error(e.getMessage(), e); } catch (Exception e) { LOGGER.error( "Unexpected exception: '{}'", e.getMessage(), e); } } static void handleCommandLine(final String[] args, final StringBuilder output) throws SQLException { if (handledHelpOrVersionOption(args, output)) { return; } try { final CommandLineParser parser = new DefaultParser(); final CommandLine commandLine = parser.parse(COMPLETE_OPTIONS, args); final DocumentDbConnectionProperties properties = new DocumentDbConnectionProperties(); if (!tryGetConnectionProperties(commandLine, properties, output)) { return; } performCommand(commandLine, properties, output); } catch (MissingOptionException e) { output.append(e.getMessage()).append(String.format("%n")); printHelp(output); } catch (ParseException e) { output.append(e.getMessage()); } catch (Exception e) { output.append(e.getClass().getSimpleName()) .append(": ") .append(e.getMessage()); } finally { closeClient(); } } private static void performCommand( final CommandLine commandLine, final DocumentDbConnectionProperties properties, final StringBuilder output) throws SQLException { switch (COMMAND_OPTIONS.getSelected()) { case GENERATE_NAME_OPTION_FLAG: // --generate-new performGenerateNew(properties, output); break; case REMOVE_OPTION_FLAG: // --remove performRemove(properties, output); break; case LIST_OPTION_FLAG: // --list-schema performListSchema(properties, output); break; case LIST_TABLES_OPTION_FLAG: // --list-tables performListTables(properties, output); break; case EXPORT_OPTION_FLAG: // --export performExport(commandLine, properties, output); break; case IMPORT_OPTION_FLAG: // --import performImport(commandLine, properties, output); break; default: output.append(SqlError.lookup(SqlError.UNSUPPORTED_PROPERTY, 
COMMAND_OPTIONS.getSelected())); break; } } private static MongoClient getMongoClient(final DocumentDbConnectionProperties properties) { if (client == null) { client = properties.createMongoClient(); } return client; } private static void closeClient() { if (client != null) { client.close(); client = null; } } private static void performImport( final CommandLine commandLine, final DocumentDbConnectionProperties properties, final StringBuilder output) throws DuplicateKeyException { final File importFile = tryGetImportFile(commandLine, output); if (importFile == null) { return; } final List<TableSchema> tableSchemaList = tryReadTableSchemaList(importFile, output); if (tableSchemaList == null) { return; } final List<DocumentDbSchemaTable> schemaTableList = tryGetSchemaTableList( tableSchemaList, output); if (schemaTableList == null) { return; } updateTableSchema(properties, schemaTableList, output); } private static void updateTableSchema( final DocumentDbConnectionProperties properties, final List<DocumentDbSchemaTable> schemaTableList, final StringBuilder output) { try { DocumentDbDatabaseSchemaMetadata.update( properties, properties.getSchemaName(), schemaTableList, getMongoClient(properties)); } catch (SQLException | DocumentDbSchemaSecurityException e) { output.append(e.getClass().getSimpleName()) .append(" ") .append(e.getMessage()); } } private static List<TableSchema> tryReadTableSchemaList( final File importFile, final StringBuilder output) { final List<TableSchema> tableSchemaList; try { tableSchemaList = JSON_OBJECT_MAPPER.readValue(importFile, new TypeReference<List<TableSchema>>() { }); } catch (IOException e) { output.append(e.getClass().getSimpleName()) .append(" ") .append(e.getMessage()); return null; } return tableSchemaList; } private static List<DocumentDbSchemaTable> tryGetSchemaTableList( final List<TableSchema> tableSchemaList, final StringBuilder output) { final List<DocumentDbSchemaTable> schemaTableList; try { schemaTableList = 
tableSchemaList.stream() .map(tableSchema -> new DocumentDbSchemaTable( tableSchema.getSqlName(), tableSchema.getCollectionName(), tableSchema.getColumns().stream() .collect(Collectors.toMap( DocumentDbSchemaColumn::getSqlName, c -> c, (c1, c2) -> throwingDuplicateMergeOnColumn(c1, c2, tableSchema.getSqlName()), LinkedHashMap::new)))) .collect(Collectors.toList()); } catch (IllegalStateException e) { output.append(e.getMessage()); return null; } return schemaTableList; } private static DocumentDbSchemaColumn throwingDuplicateMergeOnColumn( final DocumentDbSchemaColumn c1, final DocumentDbSchemaColumn c2, final String sqlName) { throw new IllegalStateException(String.format(DUPLICATE_COLUMN_KEY_DETECTED_FOR_TABLE_SCHEMA, c1.getSqlName(), sqlName, c1, c2)); } private static File tryGetImportFile( final CommandLine commandLine, final StringBuilder output) { final String importFileName = commandLine.getOptionValue(IMPORT_OPTION_FLAG, null); if (isNullOrEmpty(importFileName)) { output.append(String.format("Option '-%s' requires a file name argument.", IMPORT_OPTION_FLAG)); return null; } final Path importFilePath = USER_HOME_PATH.resolve(importFileName); if (!importFilePath.toFile().exists()) { output.append(String.format("Import file '%s' not found in your user's home folder.", importFileName)); return null; } return importFilePath.toFile(); } private static void performExport( final CommandLine commandLine, final DocumentDbConnectionProperties properties, final StringBuilder output) throws SQLException { // Determine if output file is required. final File outputFile; if (commandLine.hasOption(OUTPUT_OPTION_FLAG)) { outputFile = tryGetOutputFile(commandLine, output); if (outputFile == null) { return; } } else { outputFile = null; } final String[] requestedTableNames = commandLine.getOptionValues(EXPORT_OPTION_FLAG); final List<String> requestedTableList = requestedTableNames != null ? 
Arrays.asList(requestedTableNames) : new ArrayList<>(); final DocumentDbDatabaseSchemaMetadata schema = DocumentDbDatabaseSchemaMetadata.get( properties, properties.getSchemaName(), VERSION_LATEST_OR_NONE, getMongoClient(properties)); if (schema == null) { // No schema to export. return; } final Set<String> availTableSet = schema.getTableSchemaMap().keySet(); if (requestedTableList.isEmpty()) { requestedTableList.addAll(availTableSet); } else if (verifyRequestedTablesExist(requestedTableList, availTableSet, output)) { return; } final List<TableSchema> tableSchemaList = requestedTableList.stream() .map(tableName -> new TableSchema(schema.getTableSchemaMap().get(tableName))) .sorted(Comparator.comparing(TableSchema::getSqlName)) .collect(Collectors.toList()); try { writeTableSchemas(tableSchemaList, outputFile, output); } catch (IOException e) { output.append(e.getClass().getSimpleName()) .append(" ") .append(e.getMessage()); } } private static boolean verifyRequestedTablesExist( final List<String> requestedTableList, final Set<String> availTableNames, final StringBuilder output) { if (!availTableNames.containsAll(requestedTableList)) { final List<String> unknownTables = requestedTableList.stream() .filter(name -> !availTableNames.contains(name)) .collect(Collectors.toList()); output.append("Requested table name(s) are not recognized in schema: ") .append(Strings.join(unknownTables, ',')) .append(String.format("%n")) .append("Available table names: ") .append(Strings.join(availTableNames, ',')); return true; } return false; } private static void writeTableSchemas( final List<TableSchema> tables, final File outputFile, final StringBuilder output) throws IOException { try (Writer writer = outputFile != null ? 
new OutputStreamWriter(Files.newOutputStream(outputFile.toPath()), StandardCharsets.UTF_8) : new StringBuilderWriter(output)) { JSON_OBJECT_MAPPER.writeValue(writer, tables); } } private static File tryGetOutputFile(final CommandLine commandLine, final StringBuilder output) { if (!USER_HOME_PATH.toFile().exists()) { output.append("User's home directory does not exist."); return null; } final String outputFileName = commandLine.getOptionValue(OUTPUT_OPTION_FLAG, null); if (isNullOrEmpty(outputFileName)) { output.append("Output file name argument must not be empty."); return null; } final Path fileNamePath = Paths.get(outputFileName).getFileName(); final File outputFile = USER_HOME_PATH.resolve(fileNamePath).toAbsolutePath().toFile(); if (outputFile.isDirectory()) { output.append("Output file name must not be a directory."); return null; } return outputFile; } private static void performListSchema( final DocumentDbConnectionProperties properties, final StringBuilder output) throws SQLException { final List<DocumentDbSchema> schemas = DocumentDbDatabaseSchemaMetadata.getSchemaList( properties, getMongoClient(properties)); for (DocumentDbSchema schema : schemas) { output.append(String.format("Name=%s, Version=%d, SQL Name=%s, Modified=%s%n", maybeQuote(schema.getSchemaName()), schema.getSchemaVersion(), maybeQuote(schema.getSqlName()), new SimpleDateFormat(DATE_FORMAT_PATTERN) .format(schema.getModifyDate())) ); } } private static void performListTables( final DocumentDbConnectionProperties properties, final StringBuilder output) throws SQLException { final DocumentDbDatabaseSchemaMetadata schema = DocumentDbDatabaseSchemaMetadata.get( properties, properties.getSchemaName(), VERSION_LATEST_OR_NONE, getMongoClient(properties)); if (schema != null) { final List<String> sortedTableNames = schema.getTableSchemaMap().keySet().stream() .sorted() .collect(Collectors.toList()); for (String tableName : sortedTableNames) { output.append(String.format("%s%n", tableName)); } } } 
@VisibleForTesting static String maybeQuote(final String value) { return StringEscapeUtils.escapeCsv(value); } private static void performRemove( final DocumentDbConnectionProperties properties, final StringBuilder output) throws SQLException { DocumentDbDatabaseSchemaMetadata.remove( properties, properties.getSchemaName(), getMongoClient(properties)); output.append(String.format(REMOVED_SCHEMA_MESSAGE, properties.getSchemaName())); } private static void performGenerateNew( final DocumentDbConnectionProperties properties, final StringBuilder output) throws SQLException { final DocumentDbDatabaseSchemaMetadata schema = DocumentDbDatabaseSchemaMetadata.get( properties, properties.getSchemaName(), VERSION_NEW, getMongoClient(properties)); if (schema != null) { output.append(String.format(NEW_SCHEMA_VERSION_GENERATED_MESSAGE, schema.getSchemaName(), schema.getSchemaVersion())); } } @VisibleForTesting static boolean tryGetConnectionProperties( final CommandLine commandLine, final DocumentDbConnectionProperties properties, final StringBuilder output) { properties.setHostname(commandLine.getOptionValue(SERVER_OPTION_FLAG)); properties.setDatabase(commandLine.getOptionValue(DATABASE_OPTION_FLAG)); properties.setUser(commandLine.getOptionValue(USER_OPTION_FLAG)); if (!trySetPassword(commandLine, properties, output)) { return false; } properties.setTlsEnabled(String.valueOf(commandLine.hasOption(TLS_OPTION_FLAG))); properties.setTlsAllowInvalidHostnames(String.valueOf(commandLine.hasOption(TLS_ALLOW_INVALID_HOSTNAMES_OPTION_FLAG))); properties.setMetadataScanMethod(commandLine.getOptionValue( SCAN_METHOD_OPTION_FLAG, DocumentDbConnectionProperty.METADATA_SCAN_METHOD.getDefaultValue())); properties.setMetadataScanLimit(commandLine.getOptionValue( SCAN_LIMIT_OPTION_FLAG, DocumentDbConnectionProperty.METADATA_SCAN_LIMIT.getDefaultValue())); properties.setSchemaName(commandLine.getOptionValue( SCHEMA_NAME_OPTION_FLAG, DocumentDbConnectionProperty.SCHEMA_NAME.getDefaultValue())); 
return true; } private static boolean trySetPassword(final CommandLine commandLine, final DocumentDbConnectionProperties properties, final StringBuilder output) { if (commandLine.hasOption(PASSWORD_OPTION_FLAG)) { properties.setPassword(commandLine.getOptionValue(PASSWORD_OPTION_FLAG)); } else { return trySetPasswordFromPromptInput(properties, output); } return true; } private static boolean trySetPasswordFromPromptInput( final DocumentDbConnectionProperties properties, final StringBuilder output) { final String passwordPrompt = SqlError.lookup(SqlError.PASSWORD_PROMPT); final Console console = System.console(); char[] password = null; if (console != null) { password = console.readPassword(passwordPrompt); } else { output.append("No console available."); } if (password == null || password.length == 0) { output.append(SqlError.lookup(SqlError.MISSING_PASSWORD)); return false; } properties.setPassword(new String(password)); return true; } private static boolean handledHelpOrVersionOption( final String[] args, final StringBuilder output) throws SQLException { final CommandLineParser parser = new DefaultParser(); final CommandLine commandLine; try { commandLine = parser.parse(HELP_VERSION_OPTIONS, args, true); } catch (ParseException e) { throw new SQLException(e.getMessage(), e); } if (commandLine.hasOption(HELP_OPTION_NAME)) { printHelp(output); return true; } else if (commandLine.hasOption(VERSION_OPTION_NAME)) { output.append(String.format("%s: version %s", LIBRARY_NAME, ARCHIVE_VERSION)); return true; } return false; } private static void printHelp(final StringBuilder output) { final StringWriter stringWriter = new StringWriter(); final PrintWriter printWriter = new PrintWriter(stringWriter); final HelpFormatter formatter = new HelpFormatter(); final String cmdLineSyntax = formatCommandLineSyntax(); formatter.printHelp(printWriter, 80, cmdLineSyntax, null, COMPLETE_OPTIONS, 1, 2, null, false); output.append(stringWriter); } private static String 
formatCommandLineSyntax() { final StringBuilder cmdLineSyntax = new StringBuilder(); cmdLineSyntax.append(LIBRARY_NAME); formatOptionGroup(cmdLineSyntax); formatOptions(cmdLineSyntax, REQUIRED_OPTIONS); formatOptions(cmdLineSyntax, OPTIONAL_OPTIONS); return cmdLineSyntax.toString(); } private static void formatOptions( final StringBuilder cmdLineSyntax, final Collection<Option> options) { for (Option option : options) { cmdLineSyntax.append(" "); if (!option.isRequired()) { cmdLineSyntax.append("["); } if (option.getOpt() != null) { cmdLineSyntax.append("-").append(option.getOpt()); } else { cmdLineSyntax.append("--").append(option.getLongOpt()); } if (option.hasArg()) { cmdLineSyntax.append(String.format(" <%s>", option.getArgName())); } else if (option.hasOptionalArg()) { cmdLineSyntax.append(String.format(" [<%s>]", option.getArgName())); } if (!option.isRequired()) { cmdLineSyntax.append("]"); } } } private static void formatOptionGroup( final StringBuilder cmdLineSyntax) { cmdLineSyntax.append(" ["); boolean isFirst = true; for (Option option : COMMAND_OPTIONS.getOptions()) { if (!isFirst) { cmdLineSyntax.append(" | "); } if (!COMMAND_OPTIONS.isRequired()) { cmdLineSyntax.append("["); } if (option.getOpt() != null) { cmdLineSyntax.append("-").append(option.getOpt()); } else { cmdLineSyntax.append("--").append(option.getLongOpt()); } if (option.hasArg()) { cmdLineSyntax.append(String.format(" <%s>", option.getArgName())); } else if (option.hasOptionalArg()) { cmdLineSyntax.append(String.format(" [<%s>]", option.getArgName())); } if (!COMMAND_OPTIONS.isRequired()) { cmdLineSyntax.append("]"); } isFirst = false; } cmdLineSyntax.append("]"); } private static List<Option> buildOptionalOptions() { final List<Option> optionalOptions = new ArrayList<>(); Option currOption; currOption = Option.builder(PASSWORD_OPTION_FLAG) .longOpt(PASSWORD_OPTION_NAME) .numberOfArgs(1) .argName(PASSWORD_OPTION_NAME) .desc(PASSWORD_OPTION_DESCRIPTION) .required(false) .build(); 
optionalOptions.add(currOption); currOption = Option.builder(SCHEMA_NAME_OPTION_FLAG) .longOpt(SCHEMA_NAME_OPTION_NAME) .numberOfArgs(1) .argName(SCHEMA_NAME_OPTION_NAME) .desc(SCHEMA_NAME_OPTION_DESCRIPTION) .required(false) .build(); optionalOptions.add(currOption); currOption = Option.builder(SCAN_METHOD_OPTION_FLAG) .longOpt(SCAN_METHOD_OPTION_NAME) .numberOfArgs(1) .argName(METHOD_ARG_NAME) .desc(SCAN_METHOD_OPTION_DESCRIPTION) .required(false) .type(DocumentDbMetadataScanMethod.class) .build(); optionalOptions.add(currOption); currOption = Option.builder(SCAN_LIMIT_OPTION_FLAG) .longOpt(SCAN_LIMIT_OPTION_NAME) .numberOfArgs(1) .argName(MAX_DOCUMENTS_ARG_NAME) .desc(SCAN_LIMIT_OPTION_DESCRIPTION) .required(false) .type(Integer.class) .build(); optionalOptions.add(currOption); currOption = Option.builder(TLS_OPTION_FLAG) .longOpt(TLS_OPTION_NAME) .desc(TLS_OPTION_DESCRIPTION) .required(false) .build(); optionalOptions.add(currOption); currOption = Option.builder(TLS_ALLOW_INVALID_HOSTNAMES_OPTION_FLAG) .longOpt(TLS_ALLOW_INVALID_HOSTNAMES_OPTION_NAME) .desc(TLS_ALLOW_INVALID_HOSTNAMES_OPTION_DESCRIPTION) .required(false) .build(); optionalOptions.add(currOption); currOption = Option.builder(OUTPUT_OPTION_FLAG) .longOpt(OUTPUT_OPTION_NAME) .desc(OUTPUT_OPTION_DESCRIPTION) .numberOfArgs(1) .argName(FILE_NAME_ARG_NAME) .required(false) .build(); optionalOptions.add(currOption); optionalOptions.add(HELP_OPTION); optionalOptions.add(VERSION_OPTION); return optionalOptions; } private static List<Option> buildRequiredOptions() { final List<Option> requiredOptions = new ArrayList<>(); Option currOption; currOption = Option.builder(SERVER_OPTION_FLAG) .longOpt(SERVER_OPTION_NAME) .numberOfArgs(1) .argName(HOST_NAME_ARG_NAME) .desc(SERVER_OPTION_DESCRIPTION) .required() .build(); requiredOptions.add(currOption); currOption = Option.builder(DATABASE_OPTION_FLAG) .longOpt(DATABASE_OPTION_NAME) .numberOfArgs(1) .argName(DATABASE_NAME_ARG_NAME) 
.desc(DATABASE_OPTION_DESCRIPTION) .required() .build(); requiredOptions.add(currOption); currOption = Option.builder(USER_OPTION_FLAG) .longOpt(USER_OPTION_NAME) .numberOfArgs(1) .argName(USER_NAME_ARG_NAME) .desc(USER_OPTION_DESCRIPTION) .required() .build(); requiredOptions.add(currOption); return requiredOptions; } private static OptionGroup buildCommandOptions() { final OptionGroup commandOptions = new OptionGroup(); Option currOption; currOption = Option.builder(GENERATE_NAME_OPTION_FLAG) .longOpt(GENERATE_NEW_OPTION_NAME) .desc(GENERATE_NEW_OPTION_DESCRIPTION) .build(); commandOptions.addOption(currOption); currOption = Option.builder(REMOVE_OPTION_FLAG) .longOpt(REMOVE_OPTION_NAME) .desc(REMOVE_OPTION_DESCRIPTION) .build(); commandOptions.addOption(currOption); currOption = Option.builder(LIST_OPTION_FLAG) .longOpt(LIST_OPTION_NAME) .desc(LIST_OPTION_DESCRIPTION) .build(); commandOptions.addOption(currOption); currOption = Option.builder(LIST_TABLES_OPTION_FLAG) .longOpt(LIST_TABLES_OPTION_NAME) .desc(LIST_TABLES_OPTION_DESCRIPTION) .build(); commandOptions.addOption(currOption); currOption = Option.builder(EXPORT_OPTION_FLAG) .longOpt(EXPORT_OPTION_NAME) .desc(EXPORT_OPTION_DESCRIPTION) .argName(TABLE_NAMES_ARG_NAME) .optionalArg(true) // Allow no arguments .hasArgs() // Unlimited arguments .valueSeparator(',') .build(); commandOptions.addOption(currOption); currOption = Option.builder(IMPORT_OPTION_FLAG) .longOpt(IMPORT_OPTION_NAME) .desc(IMPORT_OPTION_DESCRIPTION) .numberOfArgs(1) .argName(FILE_NAME_ARG_NAME) .build(); commandOptions.addOption(currOption); commandOptions.setRequired(true); return commandOptions; } protected static String getLibraryName() { String libraryName = null; try { final Path path = Paths.get(DocumentDbMain.class .getProtectionDomain() .getCodeSource() .getLocation() .toURI()); final Path fileName = path.getFileName(); if (fileName != null) { libraryName = fileName.toString(); } else { libraryName = LIBRARY_NAME_DEFAULT; } } catch 
(URISyntaxException e) { libraryName = LIBRARY_NAME_DEFAULT; } finally { if (libraryName == null) { libraryName = LIBRARY_NAME_DEFAULT; } } return libraryName; } private static String getArchiveVersion() { return DocumentDbDriver.DRIVER_VERSION; } private static Option buildVersionOption() { return Option.builder() .longOpt(VERSION_OPTION_NAME) .desc(VERSION_OPTION_DESCRIPTION) .build(); } private static Option buildHelpOption() { return Option.builder(HELP_OPTION_FLAG) .longOpt(HELP_OPTION_NAME) .desc(HELP_OPTION_DESCRIPTION) .build(); } private static @NonNull SimpleModule buildEnumLowerCaseSerializerModule() { final SimpleModule module = new SimpleModule(); final JsonSerializer<Enum<?>> serializer = new StdSerializer<Enum<?>>(Enum.class, true) { @Override public void serialize(final Enum value, final JsonGenerator jGen, final SerializerProvider provider) throws IOException { jGen.writeString(value.name().toLowerCase()); } }; module.addSerializer(serializer); return module; } @Getter private static class TableSchema { @JsonProperty(SQL_NAME_PROPERTY) private final String sqlName; @JsonProperty(COLLECTION_NAME_PROPERTY) private final String collectionName; @JsonProperty(COLUMNS_PROPERTY) private final List<DocumentDbSchemaColumn> columns; public TableSchema(final DocumentDbSchemaTable table) { this.sqlName = table.getSqlName(); this.collectionName = table.getCollectionName(); this.columns = ImmutableList.copyOf(table.getColumns()); } @JsonCreator public TableSchema( @JsonProperty(SQL_NAME_PROPERTY) final String sqlName, @JsonProperty(COLLECTION_NAME_PROPERTY) final String collectionName, @JsonProperty(COLUMNS_PROPERTY) final List<DocumentDbSchemaColumn> columns) { this.sqlName = sqlName; this.collectionName = collectionName; this.columns = columns; } } private static class StringBuilderWriter extends Writer { private final StringBuilder stringBuilder; public StringBuilderWriter(final StringBuilder stringBuilder) { this.stringBuilder = stringBuilder; } @Override 
public void write(final char[] cBuf, final int off, final int len) { stringBuilder.append(cBuf, off, len); } @Override public void flush() { // noop } @Override public void close() { // noop } } }
4,576
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbPooledConnection.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc;

import software.amazon.documentdb.jdbc.common.PooledConnection;

/**
 * DocumentDb implementation of {@link javax.sql.PooledConnection}.
 *
 * <p>This is a thin wrapper: all pooling behavior is inherited from the
 * common {@link PooledConnection} base class; this subclass only supplies
 * the driver-specific type.
 */
public class DocumentDbPooledConnection extends PooledConnection implements javax.sql.PooledConnection {

    /**
     * Constructs a {@code DocumentDbPooledConnection} wrapping the given
     * physical connection; delegates all initialization to the superclass.
     *
     * @param connection the underlying physical {@link java.sql.Connection}.
     */
    public DocumentDbPooledConnection(final java.sql.Connection connection) {
        super(connection);
    }
}
4,577
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbDriver.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc;

import lombok.SneakyThrows;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;

import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.DOCUMENT_DB_SCHEME;

/**
 * Provides a JDBC driver for the Amazon DocumentDB database.
 */
public class DocumentDbDriver extends software.amazon.documentdb.jdbc.common.Driver {
    // Note: This class must be marked public for the registration/DeviceManager to work.
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbDriver.class);
    private static final String DRIVER_MAJOR_VERSION_KEY = "driver.major.version";
    private static final String DRIVER_MINOR_VERSION_KEY = "driver.minor.version";
    private static final String DRIVER_FULL_VERSION_KEY = "driver.full.version";
    private static final String DEFAULT_APPLICATION_NAME_KEY = "default.application.name";
    private static final String PROPERTIES_FILE_PATH = "/project.properties";
    static final int DRIVER_MAJOR_VERSION;
    static final int DRIVER_MINOR_VERSION;
    static final String DRIVER_VERSION;
    static final String DEFAULT_APPLICATION_NAME;

    // Registers the JDBC driver.
    static {
        // Retrieve driver metadata from the properties file packaged with the driver.
        int majorVersion = 0;
        int minorVersion = 0;
        String fullVersion = "";
        String defaultApplicationName = "";
        try (InputStream is = DocumentDbDatabaseMetaData.class.getResourceAsStream(PROPERTIES_FILE_PATH)) {
            if (is == null) {
                // getResourceAsStream returns null when the resource is absent;
                // fail with a clear message instead of an opaque NPE from Properties.load.
                throw new IllegalStateException(
                        "Driver properties resource not found: " + PROPERTIES_FILE_PATH);
            }
            final Properties p = new Properties();
            p.load(is);
            majorVersion = Integer.parseInt(p.getProperty(DRIVER_MAJOR_VERSION_KEY));
            minorVersion = Integer.parseInt(p.getProperty(DRIVER_MINOR_VERSION_KEY));
            fullVersion = p.getProperty(DRIVER_FULL_VERSION_KEY);
            defaultApplicationName = p.getProperty(DEFAULT_APPLICATION_NAME_KEY);
        } catch (Exception e) {
            // Pass the throwable so the full stack trace is logged, not just the message.
            LOGGER.error("Error loading driver version: {}", e.getMessage(), e);
        }
        DRIVER_MAJOR_VERSION = majorVersion;
        DRIVER_MINOR_VERSION = minorVersion;
        DRIVER_VERSION = fullVersion;
        DEFAULT_APPLICATION_NAME = defaultApplicationName;

        new DocumentDbDriver().register();
    }

    /**
     * Registers this driver instance with the {@link DriverManager}.
     * {@code SQLException} is rethrown unchecked via Lombok's {@code @SneakyThrows}.
     */
    @SneakyThrows
    protected void register() {
        DriverManager.registerDriver(this);
    }

    /**
     * Opens a connection for the given DocumentDB JDBC URL.
     *
     * @param url the connection URL; returns {@code null} when it is not a
     *            DocumentDB URL (per the JDBC {@code Driver.connect} contract).
     * @param info additional connection properties.
     * @return a new {@link DocumentDbConnection}, or {@code null} when the URL
     *         is not accepted by this driver.
     * @throws SQLException if the URL or properties are invalid.
     */
    @Override
    public @Nullable Connection connect(final @Nullable String url, final Properties info)
            throws SQLException {
        if (url == null || !acceptsURL(url)) {
            return null;
        }
        final DocumentDbConnectionProperties properties;
        try {
            // Get the properties and options of the URL.
            properties = DocumentDbConnectionProperties
                    .getPropertiesFromConnectionString(info, url, getConnectStringPrefix());
        } catch (IllegalArgumentException exception) {
            throw new SQLException(exception.getMessage(), exception);
        }
        return new DocumentDbConnection(properties);
    }

    /**
     * Retrieves whether the driver thinks that it can open a connection to the given URL.
     * Typically drivers will return <code>true</code> if they understand the sub-protocol specified
     * in the URL and <code>false</code> if they do not.
     *
     * @param url the URL of the database
     * @return <code>true</code> if this driver understands the given URL;
     * <code>false</code> otherwise
     * @throws SQLException if a database access error occurs or the url is {@code null}
     */
    @Override
    public boolean acceptsURL(final String url) throws SQLException {
        if (url == null) {
            throw new SQLException("The url cannot be null");
        }
        return url.startsWith(getConnectStringPrefix());
    }

    /**
     * Gets the URL scheme prefix this driver accepts.
     *
     * @return the DocumentDB connection-string scheme.
     */
    protected String getConnectStringPrefix() {
        return DOCUMENT_DB_SCHEME;
    }

    @Override
    public int getMajorVersion() {
        return DRIVER_MAJOR_VERSION;
    }

    @Override
    public int getMinorVersion() {
        return DRIVER_MINOR_VERSION;
    }
}
4,578
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbPreparedStatement.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc;

import com.google.common.collect.ImmutableList;
import lombok.SneakyThrows;
import software.amazon.documentdb.jdbc.common.PreparedStatement;
import software.amazon.documentdb.jdbc.query.DocumentDbQueryMappingService;

import java.sql.Connection;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

import static software.amazon.documentdb.jdbc.DocumentDbStatement.setDefaultFetchSize;

/**
 * DocumentDb implementation of PreparedStatement.
 */
public class DocumentDbPreparedStatement extends PreparedStatement
        implements java.sql.PreparedStatement {
    // Query timeout in seconds; 0 means no time limit.
    private int queryTimeout = 0;
    private DocumentDbAllowDiskUseOption allowDiskUse = DocumentDbAllowDiskUseOption.DEFAULT;
    private final DocumentDbQueryExecutor queryExecutor;

    /**
     * DocumentDbPreparedStatement constructor, creates DocumentDbQueryExecutor and initializes super class.
     * @param connection Connection Object.
     * @param sql Sql query.
     * @throws SQLException if unable to construct a new {@link java.sql.PreparedStatement}.
     */
    public DocumentDbPreparedStatement(final Connection connection,
            final String sql)
            throws SQLException {
        super(connection, sql);
        final DocumentDbConnection documentDbConnection = (DocumentDbConnection)getConnection();
        // Apply the connection's configured default fetch size before anything else.
        setDefaultFetchSize(this, documentDbConnection.getConnectionProperties());
        final DocumentDbConnectionProperties connectionProperties = documentDbConnection
                .getConnectionProperties();
        final DocumentDbQueryMappingService mappingService = new DocumentDbQueryMappingService(
                connectionProperties,
                documentDbConnection.getDatabaseMetadata());
        // Seed allowDiskUse from the connection properties; it can be overridden per-statement.
        setAllowDiskUse(connectionProperties.getAllowDiskUseOption());
        queryExecutor = new DocumentDbQueryExecutor(
                this,
                connectionProperties,
                mappingService,
                getQueryTimeout(),
                getFetchSize());
    }

    // Delegates cancellation to the query executor; isClosing indicates the
    // statement itself is being closed.
    @Override
    protected void cancelQuery(final boolean isClosing) throws SQLException {
        queryExecutor.cancelQuery(isClosing);
    }

    @Override
    public java.sql.ResultSet executeQuery() throws SQLException {
        verifyOpen();
        // Propagate any fetch-size change made since construction.
        queryExecutor.setFetchSize(getFetchSize());
        return queryExecutor.executeQuery(getSql());
    }

    // @SneakyThrows: mapping-service construction may throw checked exceptions
    // beyond SQLException; they are rethrown unchecked by Lombok.
    @Override
    @SneakyThrows
    public ResultSetMetaData getMetaData() throws SQLException {
        verifyOpen();
        if (getResultSet() == null) {
            // No result set yet: derive metadata by mapping the SQL without executing it.
            // NOTE(review): this builds a fresh mapping service on every call — confirm
            // whether caching is handled inside DocumentDbQueryMappingService.
            final DocumentDbConnection connection = (DocumentDbConnection)getConnection();
            final DocumentDbQueryMappingService mappingService = new DocumentDbQueryMappingService(
                    connection.getConnectionProperties(),
                    connection.getDatabaseMetadata());
            return new DocumentDbResultSetMetaData(ImmutableList.copyOf(mappingService.get(getSql()).getColumnMetaData()));
        }
        return getResultSet().getMetaData();
    }

    /**
     * Returns the query timeout setting, with a default value of zero indicating no time limit.
     *
     * @return the query timeout in seconds.
     * @throws SQLException If the statement is closed.
     */
    @Override
    public int getQueryTimeout() throws SQLException {
        verifyOpen();
        return queryTimeout;
    }

    /**
     * Sets the time limit for querying. A timeout of zero results in no time limit when querying.
     *
     * @param seconds The query timeout in seconds
     * @throws SQLException If the statement is closed.
     */
    @Override
    public void setQueryTimeout(final int seconds) throws SQLException {
        verifyOpen();
        queryTimeout = seconds;
        // Keep the executor's view of the timeout in sync with the statement's.
        queryExecutor.setQueryTimeout(seconds);
    }

    /**
     * Gets the allow disk use option for the statement.
     *
     * @return one of the allow disk use options.
     * @throws SQLException if the connection is not open.
     */
    public DocumentDbAllowDiskUseOption getAllowDiskUse() throws SQLException {
        verifyOpen();
        return allowDiskUse;
    }

    /**
     * Sets the allow disk use indicator for the statement.
     *
     * @param allowDiskUse the indicator of whether to set the allow disk use option.
     * @throws SQLException if the connection is not open.
     */
    public void setAllowDiskUse(final DocumentDbAllowDiskUseOption allowDiskUse) throws SQLException {
        verifyOpen();
        this.allowDiskUse = allowDiskUse;
    }
}
4,579
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbReadPreference.java
/* * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. * */ package software.amazon.documentdb.jdbc; /** * The enumeration of read preferences for DocumentDb. */ public enum DocumentDbReadPreference { PRIMARY("primary"), PRIMARY_PREFERRED("primaryPreferred"), SECONDARY("secondary"), SECONDARY_PREFERRED("secondaryPreferred"), NEAREST("nearest"); private final String name; /** * Constructor for a read preference. * * @param name The value of the read preference. */ DocumentDbReadPreference(final String name) { this.name = name; } /** * Gets the string value of the read preference. * * @return The name of the read preference. */ public String getName() { return name; } /** * Returns DocumentDbReadPreference with a name that matches input string. * @param readPreferenceString name of the read preference. * @return DocumentDbReadPreference of string. */ public static DocumentDbReadPreference fromString(final String readPreferenceString) { for (DocumentDbReadPreference readPreference: DocumentDbReadPreference.values()) { if (readPreference.name.equals(readPreferenceString)) { return readPreference; } } return null; } }
4,580
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/DocumentDbStatement.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.Statement;
import software.amazon.documentdb.jdbc.query.DocumentDbQueryMappingService;

import java.sql.SQLException;

import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.FETCH_SIZE_DEFAULT;

/**
 * DocumentDb implementation of {@link java.sql.Statement}.
 */
class DocumentDbStatement extends Statement implements java.sql.Statement {
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbStatement.class);
    // Query timeout in seconds; 0 (the default) means no time limit.
    private int queryTimeout;
    private final DocumentDbQueryExecutor queryExecutor;

    /**
     * DocumentDbStatement constructor, creates DocumentDbQueryExecutor and initializes super class.
     *
     * @param connection the connection.
     * @throws SQLException if unable to construct a new {@link java.sql.Statement}.
     */
    DocumentDbStatement(
            final DocumentDbConnection connection) throws SQLException {
        super(connection);
        // Apply the connection's configured default fetch size before creating the executor.
        setDefaultFetchSize(this, connection.getConnectionProperties());
        final DocumentDbQueryMappingService mappingService = new DocumentDbQueryMappingService(
                connection.getConnectionProperties(),
                connection.getDatabaseMetadata());
        queryExecutor = new DocumentDbQueryExecutor(
                this,
                connection.getConnectionProperties(),
                mappingService,
                getQueryTimeout(),
                getFetchSize());
    }

    /**
     * Sets the default fetch size on the {@link java.sql.Statement} object.
     *
     * @param statement the Statement to set.
     * @param properties the connection properties supplying the configured default fetch size.
     * @throws SQLException if unable to set the fetch size.
     */
    static void setDefaultFetchSize(
            final java.sql.Statement statement,
            final DocumentDbConnectionProperties properties) throws SQLException {
        Integer defaultFetchSize = properties.getDefaultFetchSize();
        if (defaultFetchSize == null) {
            // Fall back to the driver-wide default when the property is unset.
            defaultFetchSize = FETCH_SIZE_DEFAULT;
        }
        if (defaultFetchSize != FETCH_SIZE_DEFAULT) {
            LOGGER.debug("Setting custom default fetch size: {}", defaultFetchSize);
        }
        statement.setFetchSize(defaultFetchSize);
    }

    /**
     * DocumentDbStatement constructor. Accepts a DocumentDbQueryExecutor that can
     * be used for testing purposes.
     * @param connection the connection.
     * @param queryExecutor the DocumentDbQueryExecutor.
     */
    DocumentDbStatement(final DocumentDbConnection connection,
            final DocumentDbQueryExecutor queryExecutor) {
        super(connection);
        // Test-only path: does not apply the default fetch size from properties.
        this.queryExecutor = queryExecutor;
    }

    // Delegates cancellation to the query executor; isClosing indicates the
    // statement itself is being closed.
    @Override
    protected void cancelQuery(final boolean isClosing) throws SQLException {
        queryExecutor.cancelQuery(isClosing);
    }

    @Override
    public java.sql.ResultSet executeQuery(final String sql) throws SQLException {
        verifyOpen();
        // Propagate any fetch-size change made since construction.
        queryExecutor.setFetchSize(getFetchSize());
        return queryExecutor.executeQuery(sql);
    }

    @Override
    public int getQueryTimeout() throws SQLException {
        verifyOpen();
        return queryTimeout;
    }

    @Override
    public void setQueryTimeout(final int seconds) throws SQLException {
        verifyOpen();
        queryTimeout = seconds;
        // Keep the executor's view of the timeout in sync with the statement's.
        queryExecutor.setQueryTimeout(seconds);
    }
}
4,581
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbSchemaFactory.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package software.amazon.documentdb.jdbc.calcite.adapter;

import org.apache.calcite.schema.Schema;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata;

/**
 * Static factory for Calcite {@link Schema} instances backed by DocumentDB
 * database metadata.
 */
public class DocumentDbSchemaFactory {

    private DocumentDbSchemaFactory() {
        // Utility class with only static members: prevent instantiation.
    }

    /**
     * Creates {@link Schema} from database metadata.
     *
     * @param databaseMetadata the database metadata.
     * @param connectionProperties the connection properties.
     * @return a new {@link Schema} for the database.
     */
    public static Schema create(final DocumentDbDatabaseSchemaMetadata databaseMetadata,
            final DocumentDbConnectionProperties connectionProperties) {
        return new DocumentDbSchema(databaseMetadata, connectionProperties);
    }
}
4,582
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbSort.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package software.amazon.documentdb.jdbc.calcite.adapter;

import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelFieldCollation;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Sort;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.util.Util;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;

/**
 * Implementation of {@link Sort}
 * relational expression in MongoDB.
 *
 * <p>Translates the sort/offset/fetch into MongoDB aggregation-pipeline stages
 * ({@code $sort}, {@code $skip}, {@code $limit}) in {@link #implement}.
 */
public class DocumentDbSort extends Sort implements DocumentDbRel {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(DocumentDbSort.class.getName());

    /**
     * Creates a new {@link DocumentDbSort}
     * @param cluster the cluster.
     * @param traitSet the trait set.
     * @param child the child
     * @param collation the collation
     * @param offset the offset node.
     * @param fetch the fetch node.
     */
    public DocumentDbSort(final RelOptCluster cluster, final RelTraitSet traitSet,
            final RelNode child, final RelCollation collation, final RexNode offset, final RexNode fetch) {
        super(cluster, traitSet, child, collation, offset, fetch);
        // Both this node and its input must already be on the DocumentDB convention.
        assert getConvention() == DocumentDbRel.CONVENTION;
        assert getConvention() == child.getConvention();
    }

    @Override
    public @Nullable RelOptCost computeSelfCost(final RelOptPlanner planner,
            final RelMetadataQuery mq) {
        // Discount the default Sort cost so the planner favors pushing the sort
        // down into the MongoDB pipeline.
        final RelOptCost relOptCost = super.computeSelfCost(planner, mq);
        return relOptCost != null
                ? relOptCost.multiplyBy(DocumentDbRules.SORT_COST_FACTOR)
                : null;
    }

    // NOTE(review): `newCollation` is ignored in favor of the existing `collation`
    // field — this mirrors the upstream Calcite Mongo adapter; confirm intentional.
    @Override
    public Sort copy(final RelTraitSet traitSet, final RelNode input,
            final RelCollation newCollation, final RexNode offset, final RexNode fetch) {
        return new DocumentDbSort(getCluster(), traitSet, input, collation, offset,
                fetch);
    }

    /**
     * Emits the pipeline stages for this node: an optional {@code $sort} built
     * from the field collations, then {@code $skip} for offset and
     * {@code $limit} for fetch, when present.
     */
    @Override
    public void implement(final Implementor implementor) {
        implementor.visitChild(0, getInput());
        if (!collation.getFieldCollations().isEmpty()) {
            final List<String> keys = new ArrayList<>();
            for (RelFieldCollation fieldCollation : collation.getFieldCollations()) {
                // DocumentDB: modified - start
                // Resolve each collation field index to its MongoDB field path.
                final List<String> names =
                        DocumentDbRules.mongoFieldNames(getRowType(),
                                implementor.getMetadataTable());
                final String name =
                        names.get(fieldCollation.getFieldIndex());
                keys.add(DocumentDbRules.maybeQuote(name) + ": " + direction(fieldCollation));
                // DocumentDB: modified - end
                // Dead placeholder kept from the upstream adapter (if (false));
                // null-direction handling is not implemented.
                if (false) {
                    // TODO: NULLS FIRST and NULLS LAST
                    switch (fieldCollation.nullDirection) {
                    case FIRST:
                        break;
                    case LAST:
                        break;
                    default:
                        break;
                    }
                }
            }
            implementor.add(null,
                    "{$sort: " + Util.toString(keys, "{", ", ", "}") + "}");
        }
        if (offset != null) {
            implementor.add(null,
                    "{$skip: " + ((RexLiteral) offset).getValue() + "}");
        }
        if (fetch != null) {
            // $limit takes an extended-JSON long so large limits round-trip safely.
            implementor.add(null,
                    "{$limit: {$numberLong: \"" + ((RexLiteral) fetch).getValue() + "\"}}");
        }
        LOGGER.info("Created sort and row limit stages of pipeline.");
        LOGGER.debug("Pipeline stages added: {}",
                implementor.getList().stream()
                        .map(c -> c.right)
                        .toArray());
    }

    // Maps a Calcite collation direction to MongoDB's sort order:
    // -1 for descending variants, 1 otherwise (ascending is the default).
    private static int direction(final RelFieldCollation fieldCollation) {
        switch (fieldCollation.getDirection()) {
        case DESCENDING:
        case STRICTLY_DESCENDING:
            return -1;
        case ASCENDING:
        case STRICTLY_ASCENDING:
        default:
            return 1;
        }
    }
}
4,583
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbJoin.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
package software.amazon.documentdb.jdbc.calcite.adapter;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Streams;
import com.mongodb.client.model.Aggregates;
import com.mongodb.client.model.UnwindOptions;
import lombok.SneakyThrows;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Join;
import org.apache.calcite.rel.core.JoinRelType;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.sql.validate.SqlValidatorUtil;
import org.apache.calcite.util.JsonBuilder;
import org.apache.calcite.util.Pair;
import org.apache.calcite.util.Util;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.utilities.JdbcType;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.metadata.DocumentDbMetadataColumn;
import software.amazon.documentdb.jdbc.metadata.DocumentDbMetadataTable;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaColumn;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.combinePath;

/**
 * Implementation of {@link Join} in DocumentDb.
 */
public class DocumentDbJoin extends Join implements DocumentDbRel {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(DocumentDbJoin.class.getName());

    /**
     * Creates a new {@link DocumentDbJoin}
     *
     * @param cluster the cluster.
     * @param traitSet the trait set.
     * @param left the left node.
     * @param right the right node.
     * @param condition the condition.
     * @param joinType the join type.
     */
    public DocumentDbJoin(
            final RelOptCluster cluster,
            final RelTraitSet traitSet,
            final RelNode left,
            final RelNode right,
            final RexNode condition,
            final JoinRelType joinType) {
        super(cluster, traitSet, ImmutableList.of(), left, right, condition,
                ImmutableSet.of(), joinType);
        assert getConvention() == DocumentDbRel.CONVENTION;
    }

    /**
     * Computes the cost of this join, discounted by the DocumentDB join cost
     * factor so the planner favors executing the join in the pipeline.
     */
    @Override
    public @Nullable RelOptCost computeSelfCost(
            final RelOptPlanner planner,
            final RelMetadataQuery mq) {
        final RelOptCost relOptCost = super.computeSelfCost(planner, mq);
        return relOptCost != null
                ? relOptCost.multiplyBy(DocumentDbRules.JOIN_COST_FACTOR)
                : null;
    }

    @Override
    public Join copy(
            final RelTraitSet traitSet,
            final RexNode conditionExpr,
            final RelNode left,
            final RelNode right,
            final JoinRelType joinType,
            final boolean semiJoinDone) {
        return new DocumentDbJoin(getCluster(), traitSet, left, right,
                conditionExpr, joinType);
    }

    /**
     * Builds the aggregation-pipeline stages for this join. The left input is
     * visited with the caller's implementor; the right input is visited with a
     * fresh implementor so its stages can be merged (same collection) or
     * nested in a {@code $lookup} pipeline (different collections).
     *
     * @param implementor the implementor accumulating pipeline stages.
     */
    @Override
    public void implement(final Implementor implementor) {
        // Visit all nodes to the left of the join.
        implementor.setJoin(true);
        implementor.visitChild(0, getLeft());
        final DocumentDbTable leftTable = implementor.getDocumentDbTable();
        final DocumentDbSchemaTable leftMetadata = implementor.getMetadataTable();

        // Create a new implementor and visit all nodes to the right of the join.
        // This implementor can contain operations specific to the right.
        final Implementor rightImplementor =
                new Implementor(implementor.getRexBuilder());
        rightImplementor.setJoin(true);
        rightImplementor.visitChild(0, getRight());
        final DocumentDbTable rightTable = rightImplementor.getDocumentDbTable();
        final DocumentDbSchemaTable rightMetadata = rightImplementor.getMetadataTable();

        if (leftTable.getCollectionName().equals(rightTable.getCollectionName())) {
            joinSameCollection(
                    implementor,
                    rightImplementor,
                    leftTable.getCollectionName(),
                    leftMetadata,
                    rightMetadata);
        } else {
            joinDifferentCollections(
                    implementor,
                    rightImplementor,
                    leftTable.getCollectionName(),
                    rightTable.getCollectionName(),
                    leftMetadata,
                    rightMetadata);
        }
        implementor.setJoin(false);
    }

    /**
     * Performs a "join" on tables from the same collection by combining their metadata and
     * filtering out null rows based on join type.
     * This is only applicable for joins where we are only "denormalizing" virtual tables by joining
     * on foreign keys.
     *
     * @param implementor the implementor from the left side of the join. Operations are
     *                    added to the left.
     * @param rightImplementor the implementor from the right side of the join.
     * @param collectionName The collection both tables are from.
     * @param leftTable the metadata of the left side of the join.
     * @param rightTable the metadata of the right side of the join.
     */
    private void joinSameCollection(
            final Implementor implementor,
            final Implementor rightImplementor,
            final String collectionName,
            final DocumentDbSchemaTable leftTable,
            final DocumentDbSchemaTable rightTable) {
        validateSameCollectionJoin(leftTable, rightTable);
        // Save the left-side stages; they are re-appended after the null
        // filters below so the filters run first.
        final List<Pair<String, String>> leftList = implementor.getList();
        implementor.setList(new ArrayList<>());

        // Eliminate null (i.e. "unmatched") rows from any virtual tables based on join type.
        // If an inner join, eliminate any null rows from either table.
        // If a left outer join, eliminate the null rows of the left side.
        // If a right outer join, eliminate the null rows of the right side.
        final ImmutableCollection<DocumentDbSchemaColumn> leftFilterColumns =
                getFilterColumns(leftTable);
        final ImmutableCollection<DocumentDbSchemaColumn> rightFilterColumns =
                getFilterColumns(rightTable);
        final Supplier<String> leftFilter =
                () -> buildFieldsExistMatchFilter(leftFilterColumns);
        final Supplier<String> rightFilter =
                () -> buildFieldsExistMatchFilter(rightFilterColumns);
        final String filterLeft;
        final String filterRight;
        final boolean rightIsVirtual = isTableVirtual(rightTable);
        final boolean leftIsVirtual = isTableVirtual(leftTable);

        // Filter out unneeded columns from the left and right sides.
        final Map<String, DocumentDbSchemaColumn> leftColumns =
                getRequiredColumns(leftTable, this::getLeft);
        final Map<String, DocumentDbSchemaColumn> rightColumns =
                getRequiredColumns(rightTable, this::getRight);

        // Create a new metadata table representing the denormalized form that will be used
        // in later parts of the query. Resolve collisions from the right table.
        final LinkedHashMap<String, DocumentDbSchemaColumn> columnMap =
                new LinkedHashMap<>(leftColumns);
        final List<String> resolutions = new ArrayList<>();
        boolean resolutionNeedsUnwind = implementor.isResolutionNeedsUnwind()
                || rightImplementor.isResolutionNeedsUnwind();
        final Set<String> usedKeys = new LinkedHashSet<>(columnMap.keySet());
        for (Entry<String, DocumentDbSchemaColumn> entry : rightColumns.entrySet()) {
            final String key = entry.getKey();
            if (columnMap.containsKey(key)) {
                // Name collision: generate a unique SQL name for the right-side column.
                final String newKey =
                        SqlValidatorUtil.uniquify(key, usedKeys, SqlValidatorUtil.EXPR_SUGGESTER);
                final DocumentDbSchemaColumn leftColumn = columnMap.get(key);
                // If the columns correspond to the same field, they may have different values depending on
                // join type. Create a new column and add a new field.
                if (entry.getValue().getFieldPath().equals(leftColumn.getFieldPath())) {
                    // NOTE(review): this put is immediately overwritten by the
                    // newRightColumn put below; it appears redundant — confirm.
                    columnMap.put(newKey, entry.getValue());
                    final DocumentDbSchemaColumn column = entry.getValue();
                    final DocumentDbSchemaColumn newRightColumn = DocumentDbMetadataColumn.builder()
                            .fieldPath(column.getFieldPath())
                            .sqlName(newKey)
                            .sqlType(column.getSqlType())
                            .dbType(column.getDbType())
                            .isIndex(column.isIndex())
                            .isPrimaryKey(column.isPrimaryKey())
                            .foreignKeyTableName(column.getForeignKeyTableName())
                            .foreignKeyColumnName(column.getForeignKeyColumnName())
                            .resolvedPath(newKey)
                            .build();
                    columnMap.put(newKey, newRightColumn);
                    resolutionNeedsUnwind = column.isIndex() || resolutionNeedsUnwind;
                    // Handle any column renames.
                    final String leftPath = DocumentDbRules.getPath(leftColumn, true);
                    final String rightPath = DocumentDbRules.getPath(entry.getValue(), true);
                    handleColumnRename(
                            resolutions,
                            newKey,
                            rightPath,
                            rightIsVirtual,
                            rightFilterColumns);
                    handleColumnRename(
                            resolutions,
                            leftPath,
                            leftPath,
                            leftIsVirtual,
                            leftFilterColumns);
                } else {
                    columnMap.put(newKey, entry.getValue());
                }
            } else {
                columnMap.put(key, entry.getValue());
            }
        }
        implementor.setResolutionNeedsUnwind(resolutionNeedsUnwind);

        // Add any unwinds from the right.
        rightImplementor.getUnwinds().forEach(op -> {
            if (!implementor.getUnwinds().contains(op)) {
                implementor.addUnwind(op);
            }
        });

        // Add the renames.
        if (!resolutions.isEmpty()) {
            final String newFields = Util.toString(resolutions, "{", ", ", "}");
            final String aggregateString = "{ $addFields : " + newFields + "}";
            implementor.addCollisionResolution(aggregateString);
        }

        // Apply the join-type-dependent null filters. Only INNER and LEFT are
        // supported here; other join types are rejected.
        switch (getJoinType()) {
            case INNER:
                filterLeft = leftFilter.get();
                filterRight = rightFilter.get();
                if (filterLeft != null) {
                    implementor.add(null, filterLeft);
                }
                if (filterRight != null) {
                    implementor.add(null, filterRight);
                }
                implementor.setNullFiltered(true);
                break;
            case LEFT:
                filterLeft = leftFilter.get();
                if (filterLeft != null) {
                    implementor.add(null, filterLeft);
                }
                implementor.setNullFiltered(true);
                break;
            default:
                throw new IllegalArgumentException(
                        SqlError.lookup(SqlError.UNSUPPORTED_JOIN_TYPE, getJoinType().name()));
        }

        // Add any remaining operations from the left.
        leftList.forEach(pair -> implementor.add(pair.left, pair.right));

        // Add remaining operations from the right.
        rightImplementor.getList().forEach(pair -> implementor.add(pair.left, pair.right));

        // Publish the combined (denormalized) metadata table on the implementor.
        final DocumentDbMetadataTable metadata = DocumentDbMetadataTable
                .builder()
                .sqlName(leftTable.getSqlName())
                .collectionName(collectionName)
                .columns(columnMap)
                .build();
        final DocumentDbTable joinedTable = new DocumentDbTable(collectionName, metadata);
        implementor.setDocumentDbTable(joinedTable);
        implementor.setMetadataTable(metadata);
    }

    /**
     * Gets whether the given table is virtual - whether it contains foreign key columns.
     *
     * @param table the table to test.
     * @return {@code true} if table contains foreign key columns, {@code false}, otherwise.
     */
    static boolean isTableVirtual(final DocumentDbSchemaTable table) {
        return table.getColumnMap().values().stream()
                .anyMatch(c -> c.getForeignKeyTableName() != null
                        && c.getForeignKeyColumnName() != null);
    }

    /**
     * Renames columns appropriately for the join. Adds a condition on whether the fields of
     * a virtual table are not null.
     *
     * @param renames the collection of renamed columns.
     * @param newKey the new key (column name) for the column.
     * @param originalPath the original path.
     * @param tableIsVirtual indicator of whether table is virtual.
     * @param filterColumns list of columns to filter.
     */
    private void handleColumnRename(
            final List<String> renames,
            final String newKey,
            final String originalPath,
            final boolean tableIsVirtual,
            final ImmutableCollection<DocumentDbSchemaColumn> filterColumns) {
        // Set the fields to be their original value unless their parent table is null for this row.
        final StringBuilder ifNullBuilder = new StringBuilder();
        final String newPath = (tableIsVirtual && tryBuildIfNullFieldsCondition(
                filterColumns, ifNullBuilder))
                ? "{ $cond : [ " + ifNullBuilder + ", "
                + DocumentDbRules.maybeQuote("$" + originalPath) + ", null ] }"
                : DocumentDbRules.maybeQuote("$" + originalPath);
        renames.add(DocumentDbRules.maybeQuote(newKey) + ": " + newPath);
    }

    /**
     * Gets the list of columns to add to the filter.
     *
     * @param table the table to get the complete list of columns.
     * @return a collection of columns to filter. Can return an empty list.
     */
    static ImmutableList<DocumentDbSchemaColumn> getFilterColumns(
            final DocumentDbSchemaTable table) {
        // We don't need to check for
        // 1. primary keys,
        // 2. foreign keys (from another table)
        // 3. columns that are "virtual" (i.e. arrays, structures)
        final List<DocumentDbSchemaColumn> columns = table.getColumnMap().values().stream()
                .filter(c -> !c.isPrimaryKey()
                        && c.getForeignKeyTableName() == null
                        && !(c instanceof DocumentDbMetadataColumn
                        && ((DocumentDbMetadataColumn) c).isGenerated())
                        && !(c.getSqlType() == null
                        || c.getSqlType() == JdbcType.ARRAY
                        || c.getSqlType() == JdbcType.JAVA_OBJECT
                        || c.getSqlType() == JdbcType.NULL))
                .collect(Collectors.toList());
        return ImmutableList.copyOf(columns);
    }

    /**
     * Creates the aggregate step for matching all provided fields.
     *
     * @param columns the columns that represents a field.
     * @return an aggregate step in JSON format if any field exist, otherwise, null.
     */
    static String buildFieldsExistMatchFilter(
            final ImmutableCollection<DocumentDbSchemaColumn> columns) {
        final StringBuilder builder = new StringBuilder();
        if (!tryBuildFieldsExists(columns, builder)) {
            return null;
        }
        builder.insert(0, "{ \"$match\": ");
        builder.append(" }");
        return builder.toString();
    }

    /**
     * Appends a {@code $exists: true} test for each column to {@code builder},
     * combining multiple tests with {@code $or}.
     *
     * @return {@code false} when {@code columns} is empty (nothing written).
     */
    private static boolean tryBuildFieldsExists(
            final ImmutableCollection<DocumentDbSchemaColumn> columns,
            final StringBuilder builder) {
        int columnCount = 0;
        for (DocumentDbSchemaColumn column : columns) {
            if (columnCount != 0) {
                builder.append(", ");
            }
            builder.append("{ ");
            builder.append(DocumentDbRules.maybeQuote(column.getFieldPath()));
            builder.append(": { \"$exists\": true } }");
            columnCount++;
        }
        if (columnCount == 0) {
            return false;
        }
        if (columnCount > 1) {
            builder.insert(0, "{ \"$or\": [ ");
            builder.append(" ] }");
        }
        return true;
    }

    /**
     * Appends an {@code $ifNull} test for each column to {@code builder},
     * combining multiple tests with {@code $or}; used as the condition of the
     * {@code $cond} produced in {@link #handleColumnRename}.
     *
     * @return {@code false} when {@code columns} is empty (nothing written).
     */
    private static boolean tryBuildIfNullFieldsCondition(
            final ImmutableCollection<DocumentDbSchemaColumn> columns,
            final StringBuilder builder) {
        int columnCount = 0;
        for (DocumentDbSchemaColumn column : columns) {
            if (columnCount != 0) {
                builder.append(", ");
            }
            builder.append("{ $ifNull: [ ");
            builder.append(DocumentDbRules.maybeQuote("$" + column.getFieldPath()));
            builder.append(", false ] }");
            columnCount++;
        }
        if (columnCount == 0) {
            return false;
        }
        if (columnCount > 1) {
            builder.insert(0, "{ \"$or\": [ ");
            builder.append(" ] }");
        }
        return true;
    }

    /**
     * Validates that the same collection join is only denormalizing any virtual tables
     * by checking the join keys and join conditions.
     *
     * @param left the metadata of the left side of the join.
     * @param right the metadata of the right side of the join.
     */
    private void validateSameCollectionJoin(
            final DocumentDbSchemaTable left, final DocumentDbSchemaTable right) {
        // Extract the join keys.
        // We can ignore filterNulls for this case as primary and foreign keys are not nullable.
        final List<Integer> leftKeys = new ArrayList<>();
        final List<Integer> rightKeys = new ArrayList<>();
        final List<RexNode> nonEquiList = new ArrayList<>();
        final List<Boolean> filterNulls = new ArrayList<>();
        RelOptUtil.splitJoinCondition(
                getLeft(),
                getRight(),
                getCondition(),
                leftKeys,
                rightKeys,
                filterNulls,
                nonEquiList);

        // Check that there are only equality conditions.
        if (!nonEquiList.isEmpty()) {
            throw new IllegalArgumentException(SqlError.lookup(SqlError.EQUIJOINS_ON_FK_ONLY));
        }

        // Check that all equality conditions are actually comparing the same fields.
        final List<String> leftNames =
                DocumentDbRules.mongoFieldNames(getLeft().getRowType(), left, true);
        final List<String> rightNames =
                DocumentDbRules.mongoFieldNames(getRight().getRowType(), right, true);
        final List<String> leftKeyNames =
                leftKeys.stream().map(leftNames::get).collect(Collectors.toList());
        final List<String> rightKeyNames =
                rightKeys.stream().map(rightNames::get).collect(Collectors.toList());
        if (!leftKeyNames.equals(rightKeyNames)) {
            throw new IllegalArgumentException(SqlError.lookup(SqlError.EQUIJOINS_ON_FK_ONLY));
        }

        // Collect the distinct, sorted primary-key names of each side.
        final List<String> rightSidePrimaryKeys = Streams
                .concat(right.getColumns().stream())
                .filter(DocumentDbSchemaColumn::isPrimaryKey)
                .map(c -> c.isIndex() ? c.getSqlName() : c.getFieldPath())
                .distinct()
                .sorted()
                .collect(Collectors.toList());
        final List<String> leftSidePrimaryKeys = Streams
                .concat(left.getColumns().stream())
                .filter(DocumentDbSchemaColumn::isPrimaryKey)
                .map(c -> c.isIndex() ? c.getSqlName() : c.getFieldPath())
                .distinct()
                .sorted()
                .collect(Collectors.toList());
        Collections.sort(leftKeyNames);

        // Check that only the necessary primary key columns are used.
        validateMinimumPrimaryKeysUsage(leftKeyNames, leftSidePrimaryKeys, rightSidePrimaryKeys);
    }

    /**
     * Validates if the keys used in the join matches with the set of primary key columns shared between the tables.
     *
     * @param keyNames keys used in the join condition
     * @param leftSidePrimaryKeys primary keys from the left side of the join.
     * @param rightSidePrimaryKeys primary keys from the right side of the join.
     */
    protected void validateMinimumPrimaryKeysUsage(final List<String> keyNames,
            final List<String> leftSidePrimaryKeys,
            final List<String> rightSidePrimaryKeys) {
        // Find the common elements between these lists.
        // NOTE: this mutates the leftSidePrimaryKeys argument in place.
        leftSidePrimaryKeys.retainAll(rightSidePrimaryKeys);

        // Check that join condition is not missing any keys.
        if (!keyNames.containsAll(leftSidePrimaryKeys)) {
            final List<String> keysMissing = new ArrayList<>(leftSidePrimaryKeys);
            keysMissing.removeAll(keyNames);
            throw new IllegalArgumentException(
                    SqlError.lookup(SqlError.JOIN_MISSING_PRIMARY_KEYS, keysMissing));
        }
    }

    /**
     * Performs a "join" on tables from the different collections using a $lookup stage.
     *
     * @param implementor the implementor from the left side of the join. Operations are
     *                    added to the left.
     * @param rightImplementor the implementor from the right side of the join.
     * @param leftCollectionName the name of the collection of the left table.
     * @param rightCollectionName the name of the collection of the right table.
     * @param leftTable the metadata of the left side of the join.
     * @param rightTable the metadata of the right side of the join.
     */
    @SneakyThrows
    private void joinDifferentCollections(
            final Implementor implementor,
            final Implementor rightImplementor,
            final String leftCollectionName,
            final String rightCollectionName,
            final DocumentDbSchemaTable leftTable,
            final DocumentDbSchemaTable rightTable) {
        // Remove null rows from right, if any.
        DocumentDbToEnumerableConverter.handleVirtualTable(rightImplementor);

        // Validate that this is a simple equality join.
        validateDifferentCollectionJoin();

        // Determine the new field in the joined documents that will hold the matched rows from the right.
        final String rightMatches = rightTable.getSqlName();

        // Filter out unneeded columns from the left and right sides.
        final Map<String, DocumentDbSchemaColumn> leftColumns =
                getRequiredColumns(leftTable, this::getLeft);
        final Map<String, DocumentDbSchemaColumn> rightColumns =
                getRequiredColumns(rightTable, this::getRight);

        // Determine the new metadata. Handle any naming collisions from the right side. Columns
        // from the right will now be nested under field specified by rightMatches.
        final LinkedHashMap<String, DocumentDbSchemaColumn> columnMap =
                new LinkedHashMap<>(leftColumns);
        final Set<String> usedKeys = new LinkedHashSet<>(columnMap.keySet());
        for (Entry<String, DocumentDbSchemaColumn> entry : rightColumns.entrySet()) {
            final String key = SqlValidatorUtil.uniquify(entry.getKey(), usedKeys,
                    SqlValidatorUtil.EXPR_SUGGESTER);
            final DocumentDbSchemaColumn oldColumn = entry.getValue();
            final DocumentDbMetadataColumn newColumn = DocumentDbMetadataColumn.builder()
                    .sqlName(oldColumn.getSqlName())
                    .fieldPath(oldColumn.getFieldPath())
                    .dbType(oldColumn.getDbType())
                    .isPrimaryKey(oldColumn.isPrimaryKey())
                    .isIndex(oldColumn.isIndex())
                    .foreignKeyColumnName(oldColumn.getForeignKeyColumnName())
                    .foreignKeyTableName(oldColumn.getForeignKeyTableName())
                    .resolvedPath(combinePath(rightMatches,
                            DocumentDbRules.getPath(oldColumn, false)))
                    .build();
            columnMap.put(key, newColumn);
        }
        final DocumentDbMetadataTable metadata = DocumentDbMetadataTable
                .builder()
                .sqlName(leftCollectionName)
                .columns(columnMap)
                .build();
        final DocumentDbTable joinedTable = new DocumentDbTable(leftCollectionName, metadata);
        implementor.setDocumentDbTable(joinedTable);
        implementor.setMetadataTable(metadata);

        // Add the lookup stage. This is the stage that "joins" the 2 collections.
        final JsonBuilder jsonBuilder = new JsonBuilder();
        final Map<String, Object> lookupMap = new LinkedHashMap<>();
        final Map<String, Object> lookupFields = new LinkedHashMap<>();
        // 1. Add collection to join.
        lookupFields.put("from", rightCollectionName);
        // 2. Fields from the left need to be in let so they can be used in $match.
        final Map<String, String> letExpressions = leftColumns.values().stream()
                .collect(
                        Collectors.toMap(
                                DocumentDbSchemaColumn::getSqlName,
                                column -> "$" + DocumentDbRules.getPath(column, false)));
        lookupFields.put("let", letExpressions);
        // 3. Add any stages from the right implementor. Convert the json strings
        // into objects so they can be added as a list to the lookup pipeline.
        final List<Map<String, Object>> stages = new ArrayList<>();
        final ObjectMapper mapper = JsonMapper.builder()
                .configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
                .build();
        for (Pair<String, String> operations : rightImplementor.getList()) {
            final String stage = operations.right;
            final Map<String, Object> map = mapper.readValue(stage,
                    new TypeReference<LinkedHashMap<String, Object>>() {
                    });
            stages.add(map);
        }
        // 4. Determine the $match stage for the pipeline. This is the join condition.
        final JoinTranslator translator =
                new JoinTranslator(implementor.getRexBuilder(), leftColumns, rightColumns);
        stages.add(translator.translateMatch(getCondition()));
        // 5. Add all stages in order to the pipeline.
        lookupFields.put("pipeline", stages);
        // 6. Add the new field where the matches will be placed.
        lookupFields.put("as", rightMatches);
        lookupMap.put("$lookup", lookupFields);
        implementor.add(null, jsonBuilder.toJsonString(lookupMap));

        // Unwind the matched rows. Preserve null/empty arrays (unmatched rows) depending on join type.
        final UnwindOptions opts = new UnwindOptions();
        switch (getJoinType()) {
            case INNER:
                // Remove rows for which there were no matches.
                opts.preserveNullAndEmptyArrays(false);
                break;
            case LEFT:
                // Keep rows for which there were no matches.
                opts.preserveNullAndEmptyArrays(true);
                break;
            default:
                throw new IllegalArgumentException(SqlError.lookup(
                        SqlError.UNSUPPORTED_JOIN_TYPE, getJoinType().name()));
        }
        implementor.add(null, String.valueOf(Aggregates.unwind("$" + rightMatches, opts)));
        LOGGER.debug("Created join stages of pipeline.");
        LOGGER.debug("Pipeline stages added: {}",
                implementor.getList().stream()
                        .map(c -> c.right)
                        .toArray());
    }

    /**
     * Temporary check to reject joins the translator may not handle correctly.
     */
    private void validateDifferentCollectionJoin() {
        // Extract the join keys.
        final List<Integer> leftKeys = new ArrayList<>();
        final List<Integer> rightKeys = new ArrayList<>();
        final List<RexNode> nonEquiList = new ArrayList<>();
        final List<Boolean> filterNulls = new ArrayList<>();
        RelOptUtil.splitJoinCondition(
                getLeft(),
                getRight(),
                getCondition(),
                leftKeys,
                rightKeys,
                filterNulls,
                nonEquiList);

        // Check that there is only a single equality condition and no non equality conditions.
        if (!nonEquiList.isEmpty() || leftKeys.size() != 1 || rightKeys.size() != 1) {
            throw new IllegalArgumentException(SqlError.lookup(SqlError.SINGLE_EQUIJOIN_ONLY));
        }
    }

    /**
     * Returns the columns of {@code table} whose SQL names appear in the row
     * type of the given input node, preserving the table's column order.
     *
     * @param table the table whose columns are filtered.
     * @param getNode supplier of the join input ({@code getLeft}/{@code getRight}).
     * @return an insertion-ordered map of the required columns.
     */
    private LinkedHashMap<String, DocumentDbSchemaColumn> getRequiredColumns(
            final DocumentDbSchemaTable table, final Supplier<RelNode> getNode) {
        final List<String> fieldNames = getNode.get().getRowType().getFieldNames();
        return table.getColumnMap().entrySet().stream()
                .filter(entry -> fieldNames.contains(entry.getKey()))
                .collect(Collectors.toMap(
                        Entry::getKey,
                        Entry::getValue,
                        (u, v) -> u,
                        LinkedHashMap::new));
    }

    /**
     * POC of a translator for the join condition.
     * Based on Translator class in DocumentDbFilter. For $lookup, we need to put
     * the match conditions inside $expr so we can reference fields from the left.
     * We also specify the conditions as $gte: [ $field, $$field2 ] rather than field : { $gte: $field2 }
     */
    private static class JoinTranslator {
        private final RexBuilder rexBuilder;
        private final List<String> fieldNames;

        JoinTranslator(
                final RexBuilder rexBuilder,
                final Map<String, DocumentDbSchemaColumn> leftColumns,
                final Map<String, DocumentDbSchemaColumn> rightColumns) {
            this.rexBuilder = rexBuilder;
            // The indexes used by RexInputRef nodes follows the order in
            // the output row (getRowType()) which is a concatenation of the 2
            // input row types (getLeft.getRowType() and getRight.getRowType()).
            // But we cannot just use mongoFieldNames with the merged metadata table
            // because the left fields will be referenced by their names as specified in "let"
            // while the right fields will be referenced by their original paths.

            // Left field names use their names as specified in the let field and need "$$"
            final List<String> leftFieldNames = leftColumns.values().stream()
                    .map(column -> "$$" + column.getSqlName())
                    .collect(Collectors.toList());

            // Right field names use their path combined with "$".
            final List<String> rightFieldNames = rightColumns.values().stream()
                    .map(column -> "$" + DocumentDbRules.getPath(column, false))
                    .collect(Collectors.toList());
            this.fieldNames = Stream.concat(leftFieldNames.stream(), rightFieldNames.stream())
                    .collect(Collectors.toList());
        }

        /**
         * Translates the join condition into a {@code $match} stage whose
         * condition is wrapped in {@code $expr} so it may reference the
         * {@code let} variables from the left side.
         */
        private Map<String, Object> translateMatch(final RexNode condition) {
            final Map<String, Object> matchMap = new LinkedHashMap<>();
            final Map<String, Object> exprMap = new LinkedHashMap<>();
            exprMap.put("$expr", translateOr(condition));
            matchMap.put("$match", exprMap);
            return matchMap;
        }

        /**
         * Translates a condition that may be an OR of other conditions.
         */
        private Object translateOr(final RexNode condition) {
            final RexNode condition2 = RexUtil.expandSearch(rexBuilder, null, condition);

            // Breaks down the condition by ORs.
            final List<Object> list = new ArrayList<>();
            for (RexNode node : RelOptUtil.disjunctions(condition2)) {
                list.add(translateAnd(node));
            }
            if (list.size() == 1) {
                return list.get(0);
            }
            final Map<String, Object> map = new LinkedHashMap<>();
            map.put("$or", list);
            return map;
        }

        /**
         * Translates a condition that may be an AND of other conditions.
         */
        private Object translateAnd(final RexNode node0) {
            // Breaks down the condition by ANDs. But the ANDs may have nested ORs!
            // These will break it.
            final List<Map<String, Object>> list = new ArrayList<>();
            for (RexNode node : RelOptUtil.conjunctions(node0)) {
                list.add(translateMatch2(node));
            }
            if (list.size() == 1) {
                return list.get(0);
            }
            final Map<String, Object> map = new LinkedHashMap<>();
            map.put("$and", list);
            return map;
        }

        /**
         * Resolves an operand to either a field-name string (for input
         * references) or a literal value. CAST/ITEM operands are not handled.
         */
        private Object getValue(final RexNode node) {
            switch (node.getKind()) {
                case INPUT_REF:
                    return fieldNames.get(((RexInputRef) node).getIndex());
                case LITERAL:
                    return ((RexLiteral) node).getValue2();
                default:
                    // Does not handle a node that is CAST or ITEM yet.
                    throw new AssertionError("cannot translate " + node);
            }
        }

        /**
         * Translates a single comparison node to its MongoDB operator form.
         */
        private Map<String, Object> translateMatch2(final RexNode node) {
            switch (node.getKind()) {
                case EQUALS:
                    return translateBinary("$eq", (RexCall) node);
                case LESS_THAN:
                    return translateBinary("$lt", (RexCall) node);
                case LESS_THAN_OR_EQUAL:
                    return translateBinary("$lte", (RexCall) node);
                case NOT_EQUALS:
                    return translateBinary("$ne", (RexCall) node);
                case GREATER_THAN:
                    return translateBinary("$gt", (RexCall) node);
                case GREATER_THAN_OR_EQUAL:
                    return translateBinary("$gte", (RexCall) node);
                default:
                    // Does not handle that the node may be a nested OR node.
                    throw new AssertionError("cannot translate " + node);
            }
        }

        /**
         * Translates a call to a binary operator.
         */
        private Map<String, Object> translateBinary(final String op, final RexCall call) {
            final Map<String, Object> map = new LinkedHashMap<>();
            final Object left = getValue(call.operands.get(0));
            final Object right = getValue(call.operands.get(1));
            final List<Object> items = new ArrayList<>();
            items.add(left);
            items.add(right);
            map.put(op, items);
            return map;
        }
    }
}
4,584
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbTable.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package software.amazon.documentdb.jdbc.calcite.adapter; import com.google.common.collect.ImmutableMap; import lombok.SneakyThrows; import org.apache.calcite.adapter.java.AbstractQueryableTable; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.linq4j.QueryProvider; import org.apache.calcite.linq4j.Queryable; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.schema.Statistic; import org.apache.calcite.schema.Statistics; import org.apache.calcite.schema.TranslatableTable; import org.apache.calcite.schema.impl.AbstractTableQueryable; import org.apache.calcite.sql.type.SqlTypeName; import org.bson.BsonDocument; import org.bson.conversions.Bson; import org.checkerframework.checker.nullness.qual.NonNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.documentdb.jdbc.common.utilities.JdbcType; import software.amazon.documentdb.jdbc.common.utilities.SqlError; 
import software.amazon.documentdb.jdbc.common.utilities.SqlState; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaColumn; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable; import java.util.AbstractMap.SimpleEntry; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Map.Entry; public class DocumentDbTable extends AbstractQueryableTable implements TranslatableTable { private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbTable.class); private static volatile Map<JdbcType, RelDataType> jdbcTypeToRelDataType = null; private final String collectionName; private final DocumentDbSchemaTable tableMetadata; private final Statistic statistic; protected DocumentDbTable( final String collectionName, final DocumentDbSchemaTable tableMetadata) { super(Object[].class); this.collectionName = collectionName; this.tableMetadata = tableMetadata; this.statistic = tableMetadata.getEstimatedRecordCount() == DocumentDbSchemaTable.UNKNOWN_RECORD_COUNT ? 
Statistics.UNKNOWN : Statistics.of(tableMetadata.getEstimatedRecordCount(), null); } @Override public Statistic getStatistic() { return statistic; } @Override public String toString() { return "DocumentDbTable {" + tableMetadata.getSqlName() + "}"; } public String getCollectionName() { return this.collectionName; } @SneakyThrows @Override public RelDataType getRowType(final RelDataTypeFactory typeFactory) { final List<Entry<String, RelDataType>> fieldList = new ArrayList<>(); SimpleEntry<String, RelDataType> field; JdbcType sqlType; RelDataType relDataType; boolean nullable; if (jdbcTypeToRelDataType == null) { initializeRelDataTypeMap(typeFactory); } for (Entry<String, DocumentDbSchemaColumn> entry : tableMetadata.getColumnMap().entrySet()) { sqlType = entry.getValue().getSqlType(); if (sqlType == JdbcType.ARRAY || sqlType == JdbcType.JAVA_OBJECT) { continue; } relDataType = jdbcTypeToRelDataType.get(sqlType); if (relDataType == null) { throw SqlError.createSQLException( LOGGER, SqlState.DATA_TYPE_TRANSFORM_VIOLATION, SqlError.UNSUPPORTED_TYPE, sqlType); } nullable = !entry.getValue().isPrimaryKey(); field = new SimpleEntry<>(entry.getKey(), typeFactory.createTypeWithNullability(relDataType, nullable)); fieldList.add(field); } return typeFactory.createStructType(fieldList); } @Override public <T> Queryable<T> asQueryable(final QueryProvider queryProvider, final SchemaPlus schema, final String tableName) { return new DocumentDbQueryable<>(queryProvider, schema, this, tableName); } @Override public RelNode toRel( final RelOptTable.ToRelContext context, final RelOptTable relOptTable) { final RelOptCluster cluster = context.getCluster(); return new DocumentDbTableScan(cluster, cluster.traitSetOf(DocumentDbRel.CONVENTION), relOptTable, this, null, tableMetadata); } // TODO: Investigate using find() here for simpler queries. // See: https://github.com/aws/amazon-documentdb-jdbc-driver/issues/240 /** Executes an "aggregate" operation on the underlying collection. 
* * <p>For example: * <code>zipsTable.aggregate( * "{$filter: {state: 'OR'}", * "{$group: {_id: '$city', c: {$sum: 1}, p: {$sum: '$pop'}}}") * </code></p> * * @param databaseName Name of the database * @param fields List of fields to project; or null to return map * @param paths List of paths * @param operations One or more JSON strings * @return Enumerator of results */ Enumerable<Object> aggregate( final String databaseName, final List<Entry<String, Class<?>>> fields, final List<String> paths, final List<String> operations) { final List<Bson> list = new ArrayList<>(); for (String operation : operations) { list.add(BsonDocument.parse(operation)); } // Return this instead of the anonymous class to get more information from CalciteSignature. return new DocumentDbEnumerable( databaseName, collectionName, list, paths); } /** Implementation of {@link org.apache.calcite.linq4j.Queryable} based on * a {@link DocumentDbTable}. * * @param <T> element type */ public static class DocumentDbQueryable<T> extends AbstractTableQueryable<T> { DocumentDbQueryable(final QueryProvider queryProvider, final SchemaPlus schema, final DocumentDbTable table, final String tableName) { super(queryProvider, schema, table, tableName); } @SuppressWarnings("unchecked") @Override public Enumerator<T> enumerator() { //noinspection unchecked return (Enumerator<T>) new DocumentDbEnumerator(); } private String getDatabaseName() { return getUnwrappedDocumentDbSchema().getDatabaseName(); } @NonNull private DocumentDbSchema getUnwrappedDocumentDbSchema() { final DocumentDbSchema result = this.schema.unwrap(DocumentDbSchema.class); if (result != null) { return result; } throw new NullPointerException(); } private DocumentDbTable getTable() { return (DocumentDbTable) table; } /** Called via code-generation. 
* * @see DocumentDbMethod#MONGO_QUERYABLE_AGGREGATE * @return an enumerable of the aggregate pipeline */ @SuppressWarnings("UnusedDeclaration") public Enumerable<Object> aggregate(final List<Entry<String, Class<?>>> fields, final List<String> paths, final List<String> operations) { return getTable() .aggregate(getDatabaseName(), fields, paths, operations); } // TODO: Investigate using find() here for simpler queries. // See: https://github.com/aws/amazon-documentdb-jdbc-driver/issues/240 } private static synchronized void initializeRelDataTypeMap(final RelDataTypeFactory typeFactory) { if (jdbcTypeToRelDataType == null) { jdbcTypeToRelDataType = ImmutableMap.<JdbcType, RelDataType>builder() .put(JdbcType.BIGINT, typeFactory.createSqlType(SqlTypeName.BIGINT)) .put(JdbcType.BOOLEAN, typeFactory.createSqlType(SqlTypeName.BOOLEAN)) .put( JdbcType.DECIMAL, typeFactory.createSqlType( SqlTypeName.DECIMAL, typeFactory.getTypeSystem().getMaxPrecision(SqlTypeName.DECIMAL), typeFactory.getTypeSystem().getMaxScale(SqlTypeName.DECIMAL))) .put(JdbcType.DOUBLE, typeFactory.createSqlType(SqlTypeName.DOUBLE)) .put(JdbcType.INTEGER, typeFactory.createSqlType(SqlTypeName.INTEGER)) .put(JdbcType.NULL, typeFactory.createSqlType(SqlTypeName.VARCHAR)) .put(JdbcType.TIMESTAMP, typeFactory.createSqlType(SqlTypeName.TIMESTAMP)) .put( JdbcType.VARCHAR, typeFactory.createSqlType( SqlTypeName.VARCHAR, typeFactory.getTypeSystem().getMaxPrecision(SqlTypeName.VARCHAR))) .put( JdbcType.VARBINARY, typeFactory.createSqlType( SqlTypeName.VARBINARY, typeFactory.getTypeSystem().getMaxPrecision(SqlTypeName.VARBINARY))) .build(); } } }
4,585
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbRules.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package software.amazon.documentdb.jdbc.calcite.adapter; import com.google.common.io.BaseEncoding; import lombok.Getter; import lombok.SneakyThrows; import org.apache.calcite.adapter.enumerable.RexImpTable.NullAs; import org.apache.calcite.adapter.enumerable.RexToLixTranslator; import org.apache.calcite.adapter.java.JavaTypeFactory; import org.apache.calcite.avatica.util.TimeUnitRange; import org.apache.calcite.plan.Convention; import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.rel.InvalidRelException; import org.apache.calcite.rel.RelCollations; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.convert.ConverterRule; import org.apache.calcite.rel.core.Sort; import org.apache.calcite.rel.logical.LogicalAggregate; import org.apache.calcite.rel.logical.LogicalFilter; import org.apache.calcite.rel.logical.LogicalJoin; import org.apache.calcite.rel.logical.LogicalProject; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexVisitorImpl; 
import org.apache.calcite.sql.SqlIntervalQualifier; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlLibraryOperators; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.util.Bug; import org.apache.calcite.util.DateString; import org.apache.calcite.util.TimeString; import org.apache.calcite.util.Util; import org.apache.calcite.util.trace.CalciteTrace; import org.bson.BsonDocument; import org.bson.BsonType; import org.bson.BsonValue; import org.checkerframework.checker.nullness.qual.NonNull; import org.slf4j.Logger; import software.amazon.documentdb.jdbc.common.utilities.SqlError; import software.amazon.documentdb.jdbc.common.utilities.SqlState; import software.amazon.documentdb.jdbc.metadata.DocumentDbMetadataColumn; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaColumn; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.time.DayOfWeek; import java.time.Instant; import java.time.Month; import java.time.format.TextStyle; import java.time.temporal.ChronoUnit; import java.util.AbstractList; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.function.BiFunction; import java.util.regex.Pattern; import java.util.stream.Collectors; import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.isNullOrWhitespace; /** * Rules and relational operators for * {@link DocumentDbRel#CONVENTION MONGO} * calling convention. 
*/ public final class DocumentDbRules { private static final Logger LOGGER = CalciteTrace.getPlannerTracer(); private static final Pattern OBJECT_ID_PATTERN = Pattern.compile("^[0-9a-zA-Z]{24}$"); static final Map<String, String> ESCAPE_MAP; static { final Map<String, String> escapeMap = new HashMap<>(); escapeMap.put("[']", "\\'"); ESCAPE_MAP = Collections.unmodifiableMap(escapeMap); } private DocumentDbRules() { } @SuppressWarnings("MutablePublicArray") static final RelOptRule[] RULES = { DocumentDbSortRule.INSTANCE, DocumentDbFilterRule.INSTANCE, DocumentDbProjectRule.INSTANCE, DocumentDbAggregateRule.INSTANCE, DocumentDbJoinRule.INSTANCE }; // Factors for computing the cost of the DocumentDbRel nodes. public static final double PROJECT_COST_FACTOR = 0.1; public static final double FILTER_COST_FACTOR = 0.1; public static final double JOIN_COST_FACTOR = 0.1; public static final double SORT_COST_FACTOR = 0.05; public static final double ENUMERABLE_COST_FACTOR = 0.1; public static final int MAX_PROJECT_FIELDS = 50; /** Returns 'string' if it is a call to item['string'], null otherwise. 
*/ static String isItem(final RexCall call) { if (call.getOperator() != SqlStdOperatorTable.ITEM) { return null; } final RexNode op0 = call.operands.get(0); final RexNode op1 = call.operands.get(1); if (op0 instanceof RexInputRef && ((RexInputRef) op0).getIndex() == 0 && op1 instanceof RexLiteral && ((RexLiteral) op1).getValue2() instanceof String) { return (String) ((RexLiteral) op1).getValue2(); } return null; } // DocumentDB: modified - start static List<String> mongoFieldNames(final RelDataType rowType, final DocumentDbSchemaTable metadataTable, final boolean useOriginalPaths) { // DocumentDB: modified - end return new AbstractList<String>() { @Override public String get(final int index) { // DocumentDB: modified - start final String name = rowType.getFieldList().get(index).getName(); final DocumentDbSchemaColumn column = metadataTable.getColumnMap().get(name); // Null columns are assumed to be fields generated by the query // such as aggregate expressions (ex: COUNT(*)). if (column == null) { return getNormalizedIdentifier(name); } return getPath(column, useOriginalPaths); // DocumentDB: modified - end } @Override public int size() { return rowType.getFieldCount(); } }; } static String getPath(final DocumentDbSchemaColumn column, final boolean useOriginalPaths) { final String path; if (column instanceof DocumentDbMetadataColumn && (!isNullOrWhitespace(((DocumentDbMetadataColumn) column).getResolvedPath()) && !useOriginalPaths)) { path = ((DocumentDbMetadataColumn) column).getResolvedPath(); } else if (column.isIndex()) { path = column.getSqlName(); } else { path = column.getFieldPath(); } if (isNullOrWhitespace(path)) { return null; } return path; } static List<String> mongoFieldNames(final RelDataType rowType, final DocumentDbSchemaTable metadataTable) { return mongoFieldNames(rowType, metadataTable, false); } static String maybeQuote(final String s) { if (!needsQuote(s, '\'')) { return s; } return quote(s, '\'', ESCAPE_MAP); } static String quote(final 
String s) { return quote(s, '\'', ESCAPE_MAP); } /** * Quotes a string with the given quote character, handling any escape substitutions provided. * @param s the value to quote. * @param quoteChar the character to quote the value with. * @param escapeMap the map of regular expressions to escaped replacement values. * @return an escaped and quoted value. */ public static String quote(final String s, final char quoteChar, final Map<String, String> escapeMap) { String value = s; for (Map.Entry<String, String> entry : escapeMap.entrySet()) { value = value.replaceAll(entry.getKey(), entry.getValue()); } return quoteChar + value + quoteChar; } private static boolean needsQuote(final String s, final char quoteChar) { for (int i = 0, n = s.length(); i < n; i++) { final char c = s.charAt(i); // DocumentDB: modified - start // Add quotes for embedded documents (contains '.') and // for field names with ':'. if (!Character.isJavaIdentifierPart(c) || c == quoteChar || c == '$' || c == '.' || c == ':') { return true; } // DocumentDB: modified - end } return false; } /** * Removes the '$' symbol from the start of a string, and replaces it with '_'. * @param fieldName The non-normalized string * @return The input string with '$' replaced by '_' */ static String getNormalizedIdentifier(final String fieldName) { return fieldName.startsWith("$") ? "_" + fieldName.substring(1) : fieldName; } /** Translator from {@link RexNode} to strings in MongoDB's expression * language. 
*/ static class RexToMongoTranslator extends RexVisitorImpl<Operand> { private final JavaTypeFactory typeFactory; private final List<String> inFields; private final List<String> keys; private final DocumentDbSchemaTable schemaTable; private final Map<SqlOperator, BiFunction<RexCall, List<Operand>, Operand>> rexCallToMongoMap = new HashMap<>(); private static final Map<SqlOperator, String> MONGO_OPERATORS = new HashMap<>(); static { // Arithmetic MONGO_OPERATORS.put(SqlStdOperatorTable.DIVIDE, "$divide"); MONGO_OPERATORS.put(SqlStdOperatorTable.MULTIPLY, "$multiply"); MONGO_OPERATORS.put(SqlStdOperatorTable.MOD, "$mod"); MONGO_OPERATORS.put(SqlStdOperatorTable.PLUS, "$add"); MONGO_OPERATORS.put(SqlStdOperatorTable.MINUS, "$subtract"); MONGO_OPERATORS.put(SqlStdOperatorTable.MINUS_DATE, "$subtract"); // Boolean MONGO_OPERATORS.put(SqlStdOperatorTable.AND, "$and"); MONGO_OPERATORS.put(SqlStdOperatorTable.OR, "$or"); MONGO_OPERATORS.put(SqlStdOperatorTable.NOT, "$not"); // Comparison MONGO_OPERATORS.put(SqlStdOperatorTable.EQUALS, "$eq"); MONGO_OPERATORS.put(SqlStdOperatorTable.NOT_EQUALS, "$ne"); MONGO_OPERATORS.put(SqlStdOperatorTable.GREATER_THAN, "$gt"); MONGO_OPERATORS.put(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, "$gte"); MONGO_OPERATORS.put(SqlStdOperatorTable.LESS_THAN, "$lt"); MONGO_OPERATORS.put(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, "$lte"); MONGO_OPERATORS.put(SqlStdOperatorTable.IS_NULL, "$lte"); MONGO_OPERATORS.put(SqlStdOperatorTable.IS_NOT_NULL, "$gt"); } private void initializeRexCallToMongoMap(final Instant currentTime) { // Arithmetic rexCallToMongoMap.put(SqlStdOperatorTable.DIVIDE, (call, strings) -> getMongoAggregateForOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.MULTIPLY, (call, strings) -> getMongoAggregateForOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.MOD, (call, strings) -> getMongoAggregateForOperator( call, 
strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.PLUS, (call, strings) -> getMongoAggregateForOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.MINUS, (call, strings) -> getMongoAggregateForOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.DIVIDE_INTEGER, RexToMongoTranslator::getMongoAggregateForIntegerDivide); // Boolean rexCallToMongoMap.put(SqlStdOperatorTable.AND, (call, strings) -> getMongoAggregateForAndOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.OR, (call, strings) -> getMongoAggregateForOrOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.NOT, (call, strings) -> getMongoAggregateForComparisonOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); // Comparison rexCallToMongoMap.put(SqlStdOperatorTable.EQUALS, (call, strings) -> getMongoAggregateForComparisonOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); // Need to handle null value rexCallToMongoMap.put(SqlStdOperatorTable.NOT_EQUALS, (call, strings) -> getMongoAggregateForComparisonOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.GREATER_THAN, (call, strings) -> getMongoAggregateForComparisonOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, (call, strings) -> getMongoAggregateForComparisonOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.LESS_THAN, (call, strings) -> getMongoAggregateForComparisonOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, (call, strings) -> getMongoAggregateForComparisonOperator( call, 
strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.IS_NULL, (call, strings) -> getMongoAggregateForNullOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); rexCallToMongoMap.put(SqlStdOperatorTable.IS_NOT_NULL, (call, strings) -> getMongoAggregateForNullOperator( call, strings, MONGO_OPERATORS.get(call.getOperator()))); // Date operations rexCallToMongoMap.put(SqlStdOperatorTable.CURRENT_DATE, (call, operands) -> DateFunctionTranslator.translateCurrentTimestamp(currentTime)); rexCallToMongoMap.put(SqlStdOperatorTable.CURRENT_TIME, (call, operands) -> DateFunctionTranslator.translateCurrentTimestamp(currentTime)); rexCallToMongoMap.put(SqlStdOperatorTable.CURRENT_TIMESTAMP, (call, operands) -> DateFunctionTranslator.translateCurrentTimestamp(currentTime)); rexCallToMongoMap.put(SqlStdOperatorTable.DATETIME_PLUS, DateFunctionTranslator::translateDateAdd); rexCallToMongoMap.put(SqlStdOperatorTable.EXTRACT, DateFunctionTranslator::translateExtract); rexCallToMongoMap.put(SqlLibraryOperators.DAYNAME, DateFunctionTranslator::translateDayName); rexCallToMongoMap.put(SqlLibraryOperators.MONTHNAME, DateFunctionTranslator::translateMonthName); rexCallToMongoMap.put(SqlStdOperatorTable.FLOOR, DateFunctionTranslator::translateFloor); rexCallToMongoMap.put(SqlStdOperatorTable.MINUS_DATE, DateFunctionTranslator::translateDateDiff); // CASE, ITEM rexCallToMongoMap.put(SqlStdOperatorTable.CASE, RexToMongoTranslator::getMongoAggregateForCase); rexCallToMongoMap.put(SqlStdOperatorTable.ITEM, RexToMongoTranslator::getMongoAggregateForItem); // String operations rexCallToMongoMap.put(SqlStdOperatorTable.CONCAT, StringFunctionTranslator::getMongoAggregateForConcatOperator); rexCallToMongoMap.put(SqlLibraryOperators.CONCAT_FUNCTION, StringFunctionTranslator::getMongoAggregateForConcatOperator); rexCallToMongoMap.put(SqlStdOperatorTable.SUBSTRING, StringFunctionTranslator::getMongoAggregateForSubstringOperator); 
rexCallToMongoMap.put(SqlStdOperatorTable.POSITION, StringFunctionTranslator::getMongoAggregateForPositionStringOperator); rexCallToMongoMap.put(SqlStdOperatorTable.UPPER, StringFunctionTranslator::getMongoAggregateForStringOperator); rexCallToMongoMap.put(SqlStdOperatorTable.LOWER, StringFunctionTranslator::getMongoAggregateForStringOperator); rexCallToMongoMap.put(SqlStdOperatorTable.CHAR_LENGTH, StringFunctionTranslator::getMongoAggregateForStringOperator); rexCallToMongoMap.put(SqlLibraryOperators.LEFT, StringFunctionTranslator::getMongoAggregateForLeftOperator); rexCallToMongoMap.put(SqlLibraryOperators.RIGHT, StringFunctionTranslator::getMongoAggregateForRightOperator); } private static Operand getMongoAggregateForAndOperator(final RexCall call, final List<Operand> operands, final String s) { final StringBuilder sb = new StringBuilder(); sb.append("{$cond: [{$and: ["); for (Operand value: operands) { sb.append("{$eq: [true, ").append(value).append("]},"); } sb.deleteCharAt(sb.length() - 1); sb.append("]}, true,"); sb.append("{$cond: [{$or: ["); for (Operand value: operands) { sb.append("{$eq: [false, ").append(value).append("]},"); } sb.deleteCharAt(sb.length() - 1); sb.append("]}, false, null]}]}"); return new Operand( sb.toString(), SimpleMatchTranslator.getAndOrOperator(operands, s), false); } private static Operand getMongoAggregateForOrOperator(final RexCall call, final List<Operand> operands, final String s) { final StringBuilder sb = new StringBuilder(); sb.append("{$cond: [{$or: ["); for (Operand value: operands) { sb.append("{$eq: [true, ").append(value.getAggregationValue()).append("]},"); } sb.deleteCharAt(sb.length() - 1); sb.append("]}, true,"); sb.append("{$cond: [{$and: ["); for (Operand value: operands) { sb.append("{$eq: [false, ").append(value.getAggregationValue()).append("]},"); } sb.deleteCharAt(sb.length() - 1); sb.append("]}, false, null]}]}"); return new Operand( sb.toString(), SimpleMatchTranslator.getAndOrOperator(operands, s), 
false); } protected RexToMongoTranslator(final JavaTypeFactory typeFactory, final List<String> inFields, final List<String> keys, final DocumentDbSchemaTable schemaTable, final Instant currentTime) { super(true); initializeRexCallToMongoMap(currentTime); this.typeFactory = typeFactory; this.inFields = inFields; this.keys = keys; this.schemaTable = schemaTable; } @SneakyThrows @Override public Operand visitLiteral(final RexLiteral literal) { if (literal.getValue() == null) { return new Operand("null"); } switch (literal.getType().getSqlTypeName()) { case DECIMAL: final Comparable value = literal.getValue(); final String decimal128Format = "{\"$numberDecimal\": \"" + value + "\"}"; return new Operand("{\"$literal\": " + decimal128Format + "}", decimal128Format, true); case DOUBLE: case FLOAT: case REAL: final String doubleFormat = "{\"$numberDouble\": \"" + getValueAs(literal, Double.class) + "\"}"; return new Operand("{\"$literal\": " + doubleFormat + "}", doubleFormat, true); case TINYINT: case SMALLINT: case INTEGER: // Convert supported intervals to milliseconds. final String intFormat = "{\"$numberInt\": \"" + getValueAs(literal, Long.class) + "\"}"; return new Operand("{\"$literal\": " + intFormat + "}", intFormat, true); case BIGINT: case INTERVAL_MONTH: case INTERVAL_YEAR: case INTERVAL_YEAR_MONTH: case INTERVAL_DAY: case INTERVAL_DAY_HOUR: case INTERVAL_DAY_MINUTE: case INTERVAL_DAY_SECOND: case INTERVAL_HOUR: case INTERVAL_HOUR_MINUTE: case INTERVAL_HOUR_SECOND: case INTERVAL_MINUTE: case INTERVAL_MINUTE_SECOND: case INTERVAL_SECOND: // Convert supported intervals to milliseconds (DAY TO SECOND types) OR months (YEAR TO MONTH types). final String longFormat = "{\"$numberLong\": \"" + getValueAs(literal, Long.class) + "\"}"; return new Operand("{\"$literal\": " + longFormat + "}", longFormat, true); case DATE: // NOTE: Need to get the number of milliseconds from Epoch (not # of days). 
final String dateFormat = "{\"$date\": {\"$numberLong\": \"" + getValueAs(literal, DateString.class).getMillisSinceEpoch() + "\" } }"; return new Operand(dateFormat, dateFormat, true); case TIME: // NOTE: Need to get the number of milliseconds from day. Date portion is left as zero epoch. final String timeFormat = "{\"$date\": {\"$numberLong\": \"" + getValueAs(literal, TimeString.class).getMillisOfDay() + "\" } }"; return new Operand(timeFormat, timeFormat, true); case TIMESTAMP: case TIMESTAMP_WITH_LOCAL_TIME_ZONE: // Convert from date in milliseconds to MongoDb date. final String datetimeFormat = "{\"$date\": {\"$numberLong\": \"" + getValueAs(literal, Long.class) + "\" } }"; return new Operand(datetimeFormat, datetimeFormat, true); case BINARY: case VARBINARY: final String base64Literal = BaseEncoding.base64() .encode(getValueAs(literal, byte[].class)); final String binaryFormat = "{\"$binary\": {\"base64\": \"" + base64Literal + "\", \"subType\": \"00\"}}"; return new Operand(binaryFormat, binaryFormat, true); default: // Note: If type is [[LONG][N]VAR]CHAR, this call returns a properly escaped double-quoted string. final String simpleLiteral = RexToLixTranslator.translateLiteral(literal, literal.getType(), typeFactory, NullAs.NOT_POSSIBLE).toString(); return new Operand("{\"$literal\": " + simpleLiteral + "}", simpleLiteral, true); } } @Override public Operand visitInputRef(final RexInputRef inputRef) { // NOTE: Pass the column metadata with the operand. 
return new Operand( maybeQuote("$" + inFields.get(inputRef.getIndex())), maybeQuote(inFields.get(inputRef.getIndex())), false, schemaTable.getColumnMap().get(keys.get(inputRef.getIndex()))); } @SneakyThrows @Override public Operand visitCall(final RexCall call) { final String name = isItem(call); if (name != null) { return new Operand("'$" + name + "'"); } final List<Operand> strings = visitList(call.operands); if (call.getKind() == SqlKind.CAST || call.getKind() == SqlKind.REINTERPRET) { return getCastExpression(call, strings); } if (rexCallToMongoMap.containsKey(call.getOperator())) { final Operand result = rexCallToMongoMap.get(call.getOperator()).apply(call, strings); if (result != null) { return result; } } throw new IllegalArgumentException("Translation of " + call + " is not supported by DocumentDbRules"); } private static Operand getCastExpression(final RexCall call, final List<Operand> strings) { // Handle CAST of CHAR/VARCHAR to numeric types. if (call.operands.size() == 1 && call.operands.get(0) instanceof RexLiteral) { final RexLiteral operand = (RexLiteral) call.operands.get(0); switch (operand.getType().getSqlTypeName()) { case CHAR: case VARCHAR: final String value = operand.getValueAs(String.class); switch (call.type.getSqlTypeName()) { case DECIMAL: final String decimal128Format = "{\"$numberDecimal\": \"" + value + "\"}"; return new Operand("{\"$literal\": " + decimal128Format + "}", decimal128Format, true); case DOUBLE: case FLOAT: case REAL: final String doubleFormat = "{\"$numberDouble\": \"" + value + "\"}"; return new Operand("{\"$literal\": " + doubleFormat + "}", doubleFormat, true); case BOOLEAN: break; case TINYINT: case SMALLINT: case INTEGER: // Convert supported intervals to milliseconds. final String intFormat = "{\"$numberInt\": \"" + value + "\"}"; return new Operand("{\"$literal\": " + intFormat + "}", intFormat, true); case BIGINT: // Convert supported intervals to milliseconds. 
final String longFormat = "{\"$numberLong\": \"" + value + "\"}"; return new Operand("{\"$literal\": " + longFormat + "}", longFormat, true); default: break; } break; default: break; } } // TODO: Handle case when DocumentDB supports $convert. return strings.get(0); } private static Operand getMongoAggregateForIntegerDivide(final RexCall call, final List<Operand> strings) { return getIntegerDivisionOperation(strings.get(0), strings.get(1)); } private static Operand getMongoAggregateForCase( final RexCall call, final List<Operand> strings) { final StringBuilder sb = new StringBuilder(); final StringBuilder finish = new StringBuilder(); // case(a, b, c) -> $cond:[a, b, c] // case(a, b, c, d) -> $cond:[a, b, $cond:[c, d, null]] // case(a, b, c, d, e) -> $cond:[a, b, $cond:[c, d, e]] for (int i = 0; i < strings.size(); i += 2) { sb.append("{$cond:["); finish.append("]}"); sb.append(strings.get(i)); sb.append(','); sb.append(strings.get(i + 1)); sb.append(','); if (i == strings.size() - 3) { sb.append(strings.get(i + 2)); break; } if (i == strings.size() - 2) { sb.append("null"); break; } } sb.append(finish); return new Operand(sb.toString()); } private static Operand getMongoAggregateForItem( final RexCall call, final List<Operand> strings) { final RexNode op1 = call.operands.get(1); if (op1 instanceof RexLiteral && op1.getType().getSqlTypeName() == SqlTypeName.INTEGER) { if (!Bug.CALCITE_194_FIXED) { return new Operand("'" + stripQuotes(strings.get(0).getAggregationValue()) + "[" + ((RexLiteral) op1).getValue2() + "]'"); } return new Operand(strings.get(0) + "[" + strings.get(1) + "]"); } return null; } @SneakyThrows private static Operand getMongoAggregateForComparisonOperator( final RexCall call, final List<Operand> strings, final String stdOperator) { // {$cond: [<null check expression>, <comparison expression>, null]} final String aggregateExpr = "{\"$cond\": [" + getNullCheckExpr(strings) + ", " + getMongoAggregateForOperator(call, strings, stdOperator) + ", 
null]}"; return new Operand( aggregateExpr, hasObjectIdAndLiteral(call, strings) ? SimpleMatchTranslator.getObjectIdComparisonOperator(call, strings, stdOperator) : SimpleMatchTranslator.getComparisonOperator(call, strings, stdOperator), false); } private static String getNullCheckExpr(final List<Operand> strings) { final StringBuilder nullCheckOperator = new StringBuilder("{\"$and\": ["); for (Operand s : strings) { nullCheckOperator.append("{\"$gt\": ["); nullCheckOperator.append(s); nullCheckOperator.append(", null]},"); } nullCheckOperator.deleteCharAt(nullCheckOperator.length() - 1); nullCheckOperator.append("]}"); return nullCheckOperator.toString(); } private static Operand getMongoAggregateForNullOperator( final RexCall call, final List<Operand> strings, final String stdOperator) { return new Operand( "{" + stdOperator + ": [" + strings.get(0) + ", null]}", SimpleMatchTranslator.getNullCheckOperator(call, strings), false); } } @NonNull private static <T> T getValueAs(final RexLiteral literal, final Class<T> clazz) throws SQLException { final T result = literal.getValueAs(clazz); if (result == null) { throw SqlError.createSQLException(LOGGER, SqlState.INVALID_QUERY_EXPRESSION, SqlError.MISSING_LITERAL_VALUE, literal.getTypeName().getName()); } return result; } private static String stripQuotes(final String s) { return s.startsWith("'") && s.endsWith("'") ? s.substring(1, s.length() - 1) : s; } @SneakyThrows private static Operand getMongoAggregateForOperator( final RexCall call, final List<Operand> strings, final String stdOperator) { if (hasObjectIdAndLiteral(call, strings)) { return new Operand(getObjectIdAggregateForOperator(call, strings, stdOperator)); } return new Operand("{" + maybeQuote(stdOperator) + ": [" + Util.commaList(strings) + "]}"); } private static Operand getIntegerDivisionOperation(final String value, final String divisor) { // TODO: when $trunc is supported in DocumentDB, add back. 
        //final String intDivideOptFormat = "{ \"$trunc\": [ {\"$divide\": [%s]}, 0 ]}";
        // NOTE: $mod, $subtract, and $divide - together, perform integer division
        final String modulo = String.format(
                "{\"$mod\": [%s, %s]}", value, divisor);
        final String subtractRemainder = String.format(
                "{\"$subtract\": [%s, %s]}", value, modulo);
        // (value - (value mod divisor)) / divisor == integer quotient.
        return Operand.format("{\"$divide\": [%s, %s]}", subtractRemainder, divisor);
    }

    /** Convenience overload taking {@link Operand} arguments. */
    private static Operand getIntegerDivisionOperation(final Operand value, final Operand divisor) {
        return getIntegerDivisionOperation(value.getAggregationValue(), divisor.getAggregationValue());
    }

    /**
     * Builds an operator expression that works for an ObjectId column compared
     * against a literal: tries both the $oid-converted operands and the raw
     * (native) operands, $or-ed together.
     */
    private static String getObjectIdAggregateForOperator(
            final RexCall call,
            final List<Operand> strings,
            final String stdOperator) throws SQLException {
        // $or together the $oid and native operations.
        final String oidOperation =
                "{" + maybeQuote(stdOperator) + ": ["
                        + Util.commaList(reformatObjectIdOperands(call, strings)) + "]}";
        final String nativeOperation =
                "{" + maybeQuote(stdOperator) + ": [" + Util.commaList(strings) + "]}";
        return "{\"$or\": [" + oidOperation + ", " + nativeOperation + "]}";
    }

    /**
     * Returns true when one operand is an OBJECT_ID column and another is a
     * literal that can be interpreted as an ObjectId (a 12-byte binary, or a
     * string matching the 24-hex-character ObjectId pattern).
     */
    @SneakyThrows
    private static boolean hasObjectIdAndLiteral(
            final RexCall call,
            final List<Operand> strings) {
        final Operand objectIdOperand = strings.stream()
                .filter(operand -> operand.getColumn() != null
                        && operand.getColumn().getDbType() == BsonType.OBJECT_ID)
                .findFirst().orElse(null);
        if (objectIdOperand == null) {
            return false;
        }
        for (int index = 0; index < strings.size(); index++) {
            final Operand operand = strings.get(index);
            if (operand == objectIdOperand || !(call.operands.get(index) instanceof RexLiteral)) {
                continue;
            }
            final RexLiteral literal = (RexLiteral) call.operands.get(index);
            // NOTE(review): switch intentionally handles only binary/char families;
            // other literal types simply do not qualify as ObjectId candidates.
            switch (literal.getTypeName()) {
                case BINARY:
                case VARBINARY:
                    // An ObjectId is exactly 12 bytes.
                    final byte[] valueAsByteArray = getValueAs(literal, byte[].class);
                    if (valueAsByteArray.length == 12) {
                        return true;
                    }
                    break;
                case CHAR:
                case VARCHAR:
                    final String valueAsString = getValueAs(literal, String.class);
                    if (OBJECT_ID_PATTERN.matcher(valueAsString).matches()) {
                        return true;
                    }
                    break;
            }
        }
        return false;
    }

    /**
     * Returns a copy of the operand list where every literal that looks like an
     * ObjectId has been rewritten into extended-JSON {@code {"$oid": ...}} form.
     */
    private static List<Operand> reformatObjectIdOperands(
            final RexCall call,
            final List<Operand> strings) throws SQLException {
        final List<Operand> copyOfStrings = new ArrayList<>();
        for (int index = 0; index < strings.size(); index++) {
            final Operand operand = strings.get(index);
            if (call.operands.get(index) instanceof RexLiteral) {
                final RexLiteral literal = (RexLiteral) call.operands.get(index);
                copyOfStrings.add(reformatObjectIdLiteral(literal, operand));
            } else {
                copyOfStrings.add(operand);
            }
        }
        return copyOfStrings;
    }

    /**
     * Converts a single literal into {@code {"$oid": "<hex>"}} form when it is a
     * 12-byte binary or an ObjectId-patterned string; otherwise returns the
     * operand unchanged.
     */
    private static Operand reformatObjectIdLiteral(
            final RexLiteral literal,
            final Operand operand) throws SQLException {
        switch (literal.getTypeName()) {
            case BINARY:
            case VARBINARY:
                final byte[] valueAsByteArray = getValueAs(literal, byte[].class);
                if (valueAsByteArray.length == 12) {
                    final String value = "{\"$oid\": \"" + BaseEncoding.base16().encode(valueAsByteArray) + "\"}";
                    return new Operand(value, value, true);
                } else {
                    return operand;
                }
            case CHAR:
            case VARCHAR:
                final String valueAsString = getValueAs(literal, String.class);
                if (OBJECT_ID_PATTERN.matcher(valueAsString).matches()) {
                    final String value = "{\"$oid\": \"" + valueAsString + "\"}";
                    return new Operand(value, value, true);
                } else {
                    return operand;
                }
            default:
                return operand;
        }
    }

    /** Translates date/time functions and EXTRACT-style operations. */
    private static class DateFunctionTranslator {

        private static final String CURRENT_DATE = "CURRENT_DATE";
        private static final String CURRENT_TIMESTAMP = "CURRENT_TIMESTAMP";
        // Maps Calcite time-unit ranges to MongoDB date-part aggregation operators.
        private static final Map<TimeUnitRange, String> DATE_PART_OPERATORS = new HashMap<>();
        // 1970-01-05 is the first Monday after the epoch; used as the base for WEEK floors.
        private static final Instant FIRST_DAY_OF_WEEK_AFTER_EPOCH = Instant.parse("1970-01-05T00:00:00Z");

        static {
            // Date part operators
            DATE_PART_OPERATORS.put(TimeUnitRange.YEAR, "$year");
            DATE_PART_OPERATORS.put(TimeUnitRange.MONTH, "$month");
            DATE_PART_OPERATORS.put(TimeUnitRange.WEEK, "$week");
            DATE_PART_OPERATORS.put(TimeUnitRange.HOUR, "$hour");
DATE_PART_OPERATORS.put(TimeUnitRange.MINUTE, "$minute"); DATE_PART_OPERATORS.put(TimeUnitRange.SECOND, "$second"); DATE_PART_OPERATORS.put(TimeUnitRange.DOY, "$dayOfYear"); DATE_PART_OPERATORS.put(TimeUnitRange.DAY, "$dayOfMonth"); DATE_PART_OPERATORS.put(TimeUnitRange.DOW, "$dayOfWeek"); DATE_PART_OPERATORS.put(TimeUnitRange.ISODOW, "$isoDayOfWeek"); DATE_PART_OPERATORS.put(TimeUnitRange.ISOYEAR, "$isoWeekYear"); } private static Operand translateCurrentTimestamp(final Instant currentTime) { final String currentTimestamp = "{\"$date\": {\"$numberLong\": " + "\"" + currentTime.toEpochMilli() + "\"}}"; return new Operand(currentTimestamp, currentTimestamp, true); } @SneakyThrows private static Operand translateDateAdd(final RexCall call, final List<Operand> strings) { verifySupportedDateAddType(call.getOperands().get(1)); // Is date addition between literals (including CURRENT_DATE)? final boolean isLiteralCandidate = isDateLiteralCandidate(call, strings); if (isLiteralCandidate) { // Perform in-memory calculation before sending to server. return getDateAddLiteralOperand(strings); } // Otherwise, perform addition on server. 
            return new Operand("{ \"$add\":" + "[" + Util.commaList(strings) + "]}");
        }

        /**
         * True when every RexNode operand is a literal (or CURRENT_DATE /
         * CURRENT_TIMESTAMP) and every translated operand has a query value, so the
         * addition can be constant-folded driver-side.
         */
        private static boolean isDateLiteralCandidate(final RexCall call, final List<Operand> strings) {
            final boolean allLiterals = call.getOperands().stream()
                    .allMatch(op -> {
                        final SqlKind opKind = op.getKind();
                        final String opName = op.toString();
                        return opKind == SqlKind.LITERAL
                                || opName.equalsIgnoreCase(CURRENT_DATE)
                                || opName.equalsIgnoreCase(CURRENT_TIMESTAMP);
                    });
            final boolean allHaveQueryValue = strings.stream().allMatch(op -> op.getQueryValue() != null);
            return allLiterals && allHaveQueryValue;
        }

        /**
         * Constant-folds date + interval: parses both operands' extended JSON,
         * sums the date's epoch millis with the interval's INT64 millis, and emits
         * a single extended-JSON date literal.
         */
        private static Operand getDateAddLiteralOperand(final List<Operand> strings) {
            final String queryValue0 = strings.get(0).getQueryValue();
            final String queryValue1 = strings.get(1).getQueryValue();
            // Wrap each value in a document so the BSON parser can read it.
            final BsonDocument document0 = BsonDocument.parse("{field: " + queryValue0 + "}");
            final BsonDocument document1 = BsonDocument.parse("{field: " + queryValue1 + "}");
            long sum = 0L;
            for (BsonValue v : new BsonValue[]{document0.get("field"), document1.get("field")}) {
                switch (v.getBsonType()) {
                    case DATE_TIME:
                        sum += v.asDateTime().getValue();
                        break;
                    case INT64:
                        sum += v.asInt64().getValue();
                        break;
                    default:
                        throw new UnsupportedOperationException(
                                "Unsupported data type '" + v.getBsonType().name() + "'");
                }
            }
            final String query = "{\"$date\": {\"$numberLong\": \"" + sum + "\"}}";
            return new Operand(query, query, true);
        }

        /**
         * Rejects INTERVAL MONTH / INTERVAL YEAR additions, which cannot be
         * expressed as a fixed millisecond offset.
         */
        private static void verifySupportedDateAddType(final RexNode node)
                throws SQLFeatureNotSupportedException {
            if (node.getType().getSqlTypeName() == SqlTypeName.INTERVAL_MONTH
                    || node.getType().getSqlTypeName() == SqlTypeName.INTERVAL_YEAR) {
                throw SqlError.createSQLFeatureNotSupportedException(LOGGER,
                        SqlError.UNSUPPORTED_CONVERSION,
                        node.getType().getSqlTypeName().getName(),
                        SqlTypeName.TIMESTAMP.getName());
            }
        }

        /**
         * Translates TIMESTAMPDIFF. YEAR/QUARTER/MONTH use calendar arithmetic;
         * all other units fall back to plain date subtraction (millisecond-based).
         */
        @SneakyThrows
        private static Operand translateDateDiff(final RexCall call, final List<Operand> strings) {
            final TimeUnitRange interval = getIntervalQualifier(call).timeUnitRange;
            switch (interval) {
                case YEAR:
                    return formatDateDiffYear(strings);
                case QUARTER:
                case MONTH:
                    return formatDateDiffMonth(strings, interval);
                default:
                    return getMongoAggregateForOperator(
                            call,
                            strings,
                            RexToMongoTranslator.MONGO_OPERATORS.get(SqlStdOperatorTable.MINUS_DATE));
            }
        }

        /** Returns the call's interval qualifier, failing rather than returning null. */
        @NonNull
        private static SqlIntervalQualifier getIntervalQualifier(final RexCall call) throws SQLException {
            final SqlIntervalQualifier result = call.getType().getIntervalQualifier();
            if (result == null) {
                throw SqlError.createSQLException(LOGGER,
                        SqlState.INVALID_QUERY_EXPRESSION,
                        SqlError.MISSING_LITERAL_VALUE,
                        call.getType().getSqlTypeName().getName());
            }
            return result;
        }

        /** Year difference: simply $year(a) - $year(b). */
        private static Operand formatDateDiffYear(final List<Operand> strings) {
            final String dateDiffYearFormat = "{'$subtract': [{'$year': %1$s}, {'$year': %2$s}]}";
            return Operand.format(dateDiffYearFormat, strings.get(0), strings.get(1));
        }

        /**
         * Month/quarter difference: (unitsPerYear * year + unitOfYear) for each
         * date, subtracted. For QUARTER the multiplier is 4 and the unit is the
         * extracted quarter; for MONTH the multiplier is 12 and the unit is $month.
         */
        private static Operand formatDateDiffMonth(final List<Operand> strings,
                final TimeUnitRange timeUnitRange) {
            final String yearPartMultiplier = timeUnitRange == TimeUnitRange.QUARTER ? "4" : "12";
            final String monthPart1 = timeUnitRange == TimeUnitRange.QUARTER
                    ? translateExtractQuarter(strings.get(0)).getAggregationValue()
                    : String.format("{'$month': %s}", strings.get(0));
            final String monthPart2 = timeUnitRange == TimeUnitRange.QUARTER
                    ? translateExtractQuarter(strings.get(1)).getAggregationValue()
                    : String.format("{'$month': %s}", strings.get(1));
            final String dateDiffMonthFormat = "{'$subtract': [ "
                    + "{'$add': [ "
                    + "{'$multiply': [%1$s, {'$year': %2$s}]}, "
                    + "%4$s]}, "
                    + "{'$add': [ "
                    + "{'$multiply': [%1$s, {'$year': %3$s}]}, "
                    + "%5$s]}]}";
            return Operand.format(
                    dateDiffMonthFormat,
                    yearPartMultiplier,
                    strings.get(0),
                    strings.get(1),
                    monthPart1,
                    monthPart2);
        }

        /**
         * Translates EXTRACT(unit FROM date). QUARTER is synthesized from $month;
         * every other supported unit maps directly via DATE_PART_OPERATORS.
         */
        private static Operand translateExtract(final RexCall call, final List<Operand> strings) {
            // The first argument to extract is the interval (literal)
            // and the second argument is the date (can be any node evaluating to a date).
            final RexLiteral literal = (RexLiteral) call.getOperands().get(0);
            final TimeUnitRange range = literal.getValueAs(TimeUnitRange.class);
            if (range == TimeUnitRange.QUARTER) {
                return translateExtractQuarter(strings.get(1));
            }
            return new Operand("{ " + quote(DATE_PART_OPERATORS.get(range)) + ": " + strings.get(1) + "}");
        }

        /** Quarter = 1..4 chosen by nested $cond comparisons on $month. */
        private static Operand translateExtractQuarter(final Operand date) {
            final String extractQuarterFormatString =
                    "{'$cond': [{'$lte': [{'$month': %1$s}, 3]}, 1,"
                            + " {'$cond': [{'$lte': [{'$month': %1$s}, 6]}, 2,"
                            + " {'$cond': [{'$lte': [{'$month': %1$s}, 9]}, 3,"
                            + " {'$cond': [{'$lte': [{'$month': %1$s}, 12]}, 4,"
                            + " null]}]}]}]}";
            return Operand.format(extractQuarterFormatString, date);
        }

        /**
         * Translates DAYNAME via nested $cond on $dayOfWeek (1=Sunday..7=Saturday),
         * using the JVM default locale for the display names.
         */
        public static Operand translateDayName(final RexCall rexCall, final List<Operand> strings) {
            final String dayNameFormatString =
                    " {'$cond': [{'$eq': [{'$dayOfWeek': %8$s}, 1]}, '%1$s',"
                            + " {'$cond': [{'$eq': [{'$dayOfWeek': %8$s}, 2]}, '%2$s',"
                            + " {'$cond': [{'$eq': [{'$dayOfWeek': %8$s}, 3]}, '%3$s',"
                            + " {'$cond': [{'$eq': [{'$dayOfWeek': %8$s}, 4]}, '%4$s',"
                            + " {'$cond': [{'$eq': [{'$dayOfWeek': %8$s}, 5]}, '%5$s',"
                            + " {'$cond': [{'$eq': [{'$dayOfWeek': %8$s}, 6]}, '%6$s',"
                            + " {'$cond': [{'$eq': [{'$dayOfWeek': %8$s}, 7]}, '%7$s',"
                            + " null]}]}]}]}]}]}]}";
            return Operand.format(dayNameFormatString,
                    DayOfWeek.SUNDAY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    DayOfWeek.MONDAY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    DayOfWeek.TUESDAY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    DayOfWeek.WEDNESDAY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    DayOfWeek.THURSDAY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    DayOfWeek.FRIDAY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    DayOfWeek.SATURDAY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    strings.get(0));
        }

        /**
         * Translates MONTHNAME via nested $cond on $month (1..12), using the JVM
         * default locale for the display names.
         */
        public static Operand translateMonthName(final RexCall rexCall, final List<Operand> strings) {
            final String monthNameFormatString =
                    "{'$cond': [{'$eq': [{'$month': %13$s}, 1]}, '%1$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 2]}, '%2$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 3]}, '%3$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 4]}, '%4$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 5]}, '%5$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 6]}, '%6$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 7]}, '%7$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 8]}, '%8$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 9]}, '%9$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 10]}, '%10$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 11]}, '%11$s',"
                            + " {'$cond': [{'$eq': [{'$month': %13$s}, 12]}, '%12$s',"
                            + " null]}]}]}]}]}]}]}]}]}]}]}]}";
            return Operand.format(monthNameFormatString,
                    Month.JANUARY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.FEBRUARY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.MARCH.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.APRIL.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.MAY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.JUNE.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.JULY.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.AUGUST.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.SEPTEMBER.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.OCTOBER.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.NOVEMBER.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    Month.DECEMBER.getDisplayName(TextStyle.FULL, Locale.getDefault()),
                    strings.get(0));
        }

        /**
         * Translates FLOOR(date TO unit). Only the two-operand (date-time) form is
         * supported; returns null when the call shape is not recognized so callers
         * can fall back.
         */
        @SneakyThrows
        private static Operand translateFloor(final RexCall rexCall, final List<Operand> strings) {
            // TODO: Add support for integer floor with one operand
            if (rexCall.operands.size() != 2) {
                return null;
            }
            // NOTE: Required for getting FLOOR of date-time
            final RexNode operand2 = rexCall.operands.get(1);
            if (!(operand2.isA(SqlKind.LITERAL)
                    && operand2.getType().getSqlTypeName() == SqlTypeName.SYMBOL
                    && (((RexLiteral) operand2).getValue() instanceof TimeUnitRange))) {
                return null;
            }
            final RexLiteral literal = (RexLiteral) operand2;
            final TimeUnitRange timeUnitRange = getValueAs(literal, TimeUnitRange.class);
            switch (timeUnitRange) {
                case YEAR:
                case MONTH:
                    return new Operand(formatYearMonthFloorOperation(strings, timeUnitRange));
                case QUARTER:
                    return new Operand(formatQuarterFloorOperation(strings));
                case WEEK:
                case DAY:
                case HOUR:
                case MINUTE:
                case SECOND:
                case MILLISECOND:
                    // Fixed-length units can be floored with millisecond arithmetic.
                    return formatMillisecondFloorOperation(strings, timeUnitRange);
                default:
                    throw SqlError.createSQLFeatureNotSupportedException(LOGGER,
                            SqlError.UNSUPPORTED_PROPERTY, timeUnitRange.toString());
            }
        }

        /** YEAR floors to Jan-01; MONTH keeps the month ('%m') and floors the day. */
        private static String formatYearMonthFloorOperation(
                final List<Operand> strings,
                final TimeUnitRange timeUnitRange) {
            final String monthFormat = timeUnitRange == TimeUnitRange.YEAR ? "01" : "%m";
            return formatYearMonthFloorOperation(strings.get(0), monthFormat);
        }

        /**
         * Round-trips the date through $dateToString/$dateFromString with a
         * format that pins day and time to the start of the chosen month.
         */
        private static String formatYearMonthFloorOperation(
                final Operand dateOperand,
                final String monthFormat) {
            final String yearFormat = "%Y";
            return String.format(
                    "{'$dateFromString': {'dateString':"
                            + " {'$dateToString':"
                            + " {'date': %1$s, 'format': '%2$s-%3$s-01T00:00:00Z'}}}}",
                    dateOperand, yearFormat, monthFormat);
        }

        /**
         * Floors to a fixed-length unit: subtract the base date, integer-divide by
         * the unit's millisecond length, multiply back, and re-add the base date.
         * WEEK uses the first Monday after the epoch as its base.
         */
        private static Operand formatMillisecondFloorOperation(
                final List<Operand> strings,
                final TimeUnitRange timeUnitRange) throws SQLFeatureNotSupportedException {
            final Instant baseDate = timeUnitRange == TimeUnitRange.WEEK
                    ? FIRST_DAY_OF_WEEK_AFTER_EPOCH // Monday (or first day of week)
                    : Instant.EPOCH;
            final long divisorLong = getDivisorValueForNumericFloor(timeUnitRange);
            final String divisor = String.format(
                    "{\"$numberLong\": \"%d\"}", divisorLong);
            final String subtract = String.format(
                    "{\"$subtract\": [%s, {\"$date\": {\"$numberLong\": \"%d\"}}]}",
                    strings.get(0), baseDate.toEpochMilli());
            final Operand divide = getIntegerDivisionOperation(subtract, divisor);
            final String multiply = String.format(
                    "{\"$multiply\": [%s, %s]}", divisor, divide);
            return Operand.format(
                    "{\"$add\": [{\"$date\": {\"$numberLong\": \"%d\"}}, %s]}",
                    baseDate.toEpochMilli(), multiply);
        }

        /** QUARTER floors to the first day of Jan/Apr/Jul/Oct based on $month. */
        private static String formatQuarterFloorOperation(final List<Operand> strings) {
            final String truncateQuarterFormatString =
                    "{'$cond': [{'$lte': [{'$month': %1$s}, 3]}, %2$s,"
                            + " {'$cond': [{'$lte': [{'$month': %1$s}, 6]}, %3$s,"
                            + " {'$cond': [{'$lte': [{'$month': %1$s}, 9]}, %4$s,"
                            + " {'$cond': [{'$lte': [{'$month': %1$s}, 12]}, %5$s,"
                            + " null]}]}]}]}";
            final String monthFormatJanuary = "01";
            final String monthFormatApril = "04";
            final String monthFormatJuly = "07";
            final String monthFormatOctober = "10";
            return String.format(truncateQuarterFormatString,
                    strings.get(0),
                    formatYearMonthFloorOperation(strings.get(0), monthFormatJanuary),
                    formatYearMonthFloorOperation(strings.get(0), monthFormatApril),
                    formatYearMonthFloorOperation(strings.get(0), monthFormatJuly),
                    formatYearMonthFloorOperation(strings.get(0), monthFormatOctober));
        }

        /** Milliseconds per unit for the fixed-length floor units. */
        private static long getDivisorValueForNumericFloor(final TimeUnitRange timeUnitRange)
                throws SQLFeatureNotSupportedException {
            final long divisorLong;
            switch (timeUnitRange) {
                case WEEK:
                    divisorLong = ChronoUnit.WEEKS.getDuration().toMillis();
                    break;
                case DAY:
                    divisorLong = ChronoUnit.DAYS.getDuration().toMillis();
                    break;
                case HOUR:
                    divisorLong = ChronoUnit.HOURS.getDuration().toMillis();
                    break;
                case MINUTE:
                    divisorLong = ChronoUnit.MINUTES.getDuration().toMillis();
                    break;
                case SECOND:
                    divisorLong = ChronoUnit.SECONDS.getDuration().toMillis();
                    break;
                case MILLISECOND:
                    divisorLong = 1;
                    break;
                default:
                    throw SqlError.createSQLFeatureNotSupportedException(LOGGER,
                            SqlError.UNSUPPORTED_PROPERTY, timeUnitRange.toString());
            }
            return divisorLong;
        }
    }

    /**
     * Translates expressions using only query operators.
     */
    private static class SimpleMatchTranslator {

        // Operator to use when the operands of a comparison are swapped
        // (e.g. literal > field becomes field <= literal... NOTE(review): mapping
        // pairs > with $lte etc. — intended semantics to be confirmed against callers).
        private static final Map<SqlOperator, String> REVERSE_OPERATORS = new HashMap<>();

        static {
            REVERSE_OPERATORS.put(SqlStdOperatorTable.EQUALS, "$eq");
            REVERSE_OPERATORS.put(SqlStdOperatorTable.NOT_EQUALS, "$ne");
            REVERSE_OPERATORS.put(SqlStdOperatorTable.GREATER_THAN, "$lte");
            REVERSE_OPERATORS.put(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, "$lt");
            REVERSE_OPERATORS.put(SqlStdOperatorTable.LESS_THAN, "$gte");
            REVERSE_OPERATORS.put(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, "$gt");
        }

        /**
         * Builds a query-operator comparison that matches either the $oid form or
         * the native form of an ObjectId comparison; null when either side cannot
         * be expressed with query operators.
         */
        private static String getObjectIdComparisonOperator(
                final RexCall call,
                final List<Operand> strings,
                final String stdOperator
        ) throws SQLException {
            // $or together the $oid and native operations.
            final String nativeOperation = getComparisonOperator(call, strings, stdOperator);
            final String oidOperation =
                    getComparisonOperator(call, reformatObjectIdOperands(call, strings), stdOperator);
            if (nativeOperation != null && oidOperation != null) {
                return "{\"$or\": [" + oidOperation + ", " + nativeOperation + "]}";
            }
            // One of the forms is not expressible with query operators; give up.
            return null;
        }

        /**
         * Attempts a pure query-operator ($match-style) comparison. Returns null
         * whenever the expression cannot be represented without aggregation
         * operators, signalling the caller to use the aggregation form instead.
         */
        private static String getComparisonOperator(
                final RexCall call,
                final List<Operand> strings,
                final String stdOperator) {
            // Handle NOT if the argument is a field reference.
            if (call.isA(SqlKind.NOT) && strings.get(0).isInputRef()) {
                return "{" + strings.get(0).getQueryValue() + ": false}";
            }
            // If given 2 arguments, attempt to do a binary comparison. Only 1 side can be a field reference.
            // The other side must be a literal or an expression that can be supported without aggregate operators.
            if (strings.size() == 2) {
                final Operand left = strings.get(0);
                final Operand right = strings.get(1);
                final String reverseOp = REVERSE_OPERATORS.get(call.getOperator());
                final String simpleComparison = formatSimpleBinaryComparison(stdOperator, left, right);
                if (simpleComparison != null) {
                    return simpleComparison;
                }
                // Try to return a simple comparison by swapping the operands or return null.
                return formatSimpleBinaryComparison(reverseOp, right, left);
            }
            // For any other scenario, return null.
            return null;
        }

        /**
         * Joins already-translated query-operator operands with $and/$or. Field
         * references become {field: true}. Returns null if any operand has no
         * query form.
         */
        private static String getAndOrOperator(final List<Operand> operands, final String op) {
            final StringBuilder simple = new StringBuilder();
            simple.append("{").append(op).append(": [");
            for (Operand value : operands) {
                // If any operand is null, return null.
                if (value.getQueryValue() == null) {
                    return null;
                }
                simple.append(value.isInputRef()
                        ? "{" + value.getQueryValue() + ": true}"
                        : value.getQueryValue());
                simple.append(",");
            }
            // Strip the trailing comma before closing.
            simple.deleteCharAt(simple.length() - 1);
            simple.append("]}");
            return simple.toString();
        }

        /**
         * Emits {field: {op: value}} when the left side is a field reference and
         * the right side is query-syntax (literal-like). NOT_EQUALS is widened to
         * $nin [null, value] so null/missing fields do not match.
         */
        private static String formatSimpleBinaryComparison(
                final String op,
                final Operand leftOperand,
                final Operand rightOperand
        ) {
            // If left side is field reference and the right side is not and neither query values are null,
            // return the simple comparison.
            if (leftOperand.isInputRef()
                    && rightOperand.isQuerySyntax()
                    && leftOperand.getQueryValue() != null
                    && rightOperand.getQueryValue() != null) {
                String comparison =
                        "{" + leftOperand.getQueryValue() + ": {" + op + ": " + rightOperand.getQueryValue() + "}}";
                // For not equals, need to also handle that value is not null or undefined.
                if (op.equals(RexToMongoTranslator.MONGO_OPERATORS.get(SqlStdOperatorTable.NOT_EQUALS))) {
                    comparison =
                            "{" + leftOperand.getQueryValue() + ": {$nin: [null, " + rightOperand.getQueryValue() + "]}}";
                }
                return comparison;
            }
            return null;
        }

        /**
         * IS NULL / IS NOT NULL as a query operator on a field reference; null for
         * non-field operands (caller falls back to the aggregation form).
         */
        private static String getNullCheckOperator(
                final RexCall call,
                final List<Operand> operands
        ) {
            final String op = call.getOperator() == SqlStdOperatorTable.IS_NULL ? "$eq" : "$ne";
            return operands.get(0).isInputRef()
                    ? "{" + operands.get(0).getQueryValue() + ": {" + op + ": null }}"
                    : null;
        }
    }

    /** Translates SQL string functions into MongoDB string aggregation operators. */
    private static class StringFunctionTranslator {

        // Maps SQL string operators to their MongoDB aggregation counterparts.
        private static final Map<SqlOperator, String> STRING_OPERATORS = new HashMap<>();

        static {
            STRING_OPERATORS.put(SqlStdOperatorTable.CONCAT, "$concat");
            STRING_OPERATORS.put(SqlStdOperatorTable.LOWER, "$toLower");
            STRING_OPERATORS.put(SqlStdOperatorTable.UPPER, "$toUpper");
            STRING_OPERATORS.put(SqlStdOperatorTable.CHAR_LENGTH, "$strLenCP");
            STRING_OPERATORS.put(SqlStdOperatorTable.SUBSTRING, "$substrCP");
            STRING_OPERATORS.put(SqlStdOperatorTable.POSITION, "$indexOfCP");
        }

        /**
         * SUBSTRING via $substrCP: the SQL 1-based start becomes 0-based, and a
         * missing length defaults to Integer.MAX_VALUE (rest of string).
         */
        private static Operand getMongoAggregateForSubstringOperator(
                final RexCall call,
                final List<Operand> strings) {
            final List<Operand> inputs = new ArrayList<>(strings);
            // Convert from one-indexed to zero-indexed
            inputs.set(1, new Operand("{\"$subtract\": [" + inputs.get(1) + ", 1]}"));
            if (inputs.size() == 2) {
                inputs.add(new Operand(String.valueOf(Integer.MAX_VALUE)));
            }
            return new Operand("{" + STRING_OPERATORS.get(SqlStdOperatorTable.SUBSTRING)
                    + ": [" + Util.commaList(inputs) + "]}");
        }

        /**
         * CONCAT via $concat. The CONCAT() function form coalesces nulls to empty
         * strings; the || operator form propagates null.
         */
        private static Operand getMongoAggregateForConcatOperator(
                final RexCall call,
                final List<Operand> strings) {
            // If using CONCAT function instead of operator, convert any null values to empty string.
            final List<String> inputs = strings.stream()
                    .map(string -> call.getOperator() == SqlLibraryOperators.CONCAT_FUNCTION
                            ? "{\"$ifNull\": [" + string + ", \"\" ]}"
                            : string.toString())
                    .collect(Collectors.toList());
            return new Operand("{" + STRING_OPERATORS.get(SqlStdOperatorTable.CONCAT)
                    + ": [" + Util.commaList(inputs) + "]}");
        }

        /**
         * POSITION via $indexOfCP, returning a 1-based index (0 = not found,
         * null when either string operand is null).
         */
        private static Operand getMongoAggregateForPositionStringOperator(
                final RexCall call,
                final List<Operand> strings) {
            final List<String> args = new ArrayList<>();
            final StringBuilder operand = new StringBuilder();
            final StringBuilder finish = new StringBuilder();
            // Comparison is case-insensitive so convert both strings to same case.
// Note also that argument order in $indexOfCP needs to be string, substring, [startIndex]. args.add("{" + STRING_OPERATORS.get(SqlStdOperatorTable.LOWER) + ":" + strings.get(1) + "}"); args.add("{" + STRING_OPERATORS.get(SqlStdOperatorTable.LOWER) + ":" + strings.get(0) + "}"); // Check if either string is null. operand.append("{\"$cond\": [").append(RexToMongoTranslator.getNullCheckExpr(strings)).append(", "); // Add starting index if any. if (strings.size() == 3) { args.add("{\"$subtract\": [" + strings.get(2) + ", 1]}"); // Convert to 0-based. operand.append("{\"$cond\": [{\"$lte\": [").append(strings.get(2)).append(", 0]}, 0, "); // Check if 1-based index > 0. finish.append("]}"); } // Convert 0-based index to 1-based. operand.append("{\"$add\": [{").append(STRING_OPERATORS.get(SqlStdOperatorTable.POSITION)).append(": [").append(Util.commaList(args)).append("]}, 1]}"); operand.append(finish); operand.append(", null ]}"); // Return 1-based index when string is found. // Returns null if either base string or substring is null. // Returns 0 if substring cannot be found or index (if any) is null or non-positive. return new Operand(operand.toString()); } private static Operand getMongoAggregateForStringOperator( final RexCall call, final List<Operand> strings) { // Add a null check since these operators do not handle null or missing expressions correctly. return new Operand("{\"$cond\": [" + RexToMongoTranslator.getNullCheckExpr(strings) + ", " + "{" + STRING_OPERATORS.get(call.getOperator()) + ": " + strings.get(0) + "}" + ", null]}"); } private static Operand getMongoAggregateForLeftOperator( final RexCall call, final List<Operand> strings) { final List<Operand> inputs = new ArrayList<>(); inputs.add(strings.get(0)); // Substring as if starting from left. Start index = 0. inputs.add(new Operand("0")); inputs.add(strings.get(1)); // Returns substring starting from 0 to given length. // If length is greater than length of string, the entire string will be returned. 
            return new Operand("{\"$cond\": ["
                    + "{\"$and\": ["
                    + RexToMongoTranslator.getNullCheckExpr(strings) + ", "
                    + "{\"$gte\":[" + strings.get(1) + ", 0]}"
                    + "]}, "
                    + "{" + STRING_OPERATORS.get(SqlStdOperatorTable.SUBSTRING) + ": [" + Util.commaList(inputs) + "]}"
                    + ", null]}");
        }

        /**
         * RIGHT(str, n): substring of length n starting at (length - n); the whole
         * string when n >= length, null for null operands or negative n.
         */
        private static Operand getMongoAggregateForRightOperator(
                final RexCall call,
                final List<Operand> strings) {
            final List<Operand> inputs = new ArrayList<>();
            inputs.add(strings.get(0));
            // Substring as if starting from right. Start index = length - # of chars
            inputs.add(new Operand("{\"$subtract\": [ "
                    + "{" + STRING_OPERATORS.get(SqlStdOperatorTable.CHAR_LENGTH) + ":" + strings.get(0) + "}, "
                    + strings.get(1) + "]}"));
            inputs.add(strings.get(1));
            // If string length is less than or equal to number of characters then return the entire
            // string. Else, return substring.
            return new Operand("{\"$cond\": ["
                    + "{\"$and\": ["
                    + RexToMongoTranslator.getNullCheckExpr(strings) + ", "
                    + "{\"$gte\":[" + strings.get(1) + ", 0]}"
                    + "]}, "
                    + "{\"$cond\": [ "
                    + "{\"$lte\": ["
                    + "{" + STRING_OPERATORS.get(SqlStdOperatorTable.CHAR_LENGTH) + ":" + strings.get(0) + "}, "
                    + strings.get(1) + "]}, "
                    + strings.get(0) + ", "
                    + "{" + STRING_OPERATORS.get(SqlStdOperatorTable.SUBSTRING) + ": [" + Util.commaList(inputs) + "]}]}"
                    + ", null]}");
        }
    }

    /** Base class for planner rules that convert a relational expression to
     * MongoDB calling convention. */
    abstract static class DocumentDbConverterRule extends ConverterRule {
        protected DocumentDbConverterRule(final Config config) {
            super(config);
        }
    }

    /**
     * Rule to convert a {@link Sort} to a
     * {@link DocumentDbSort}.
     */
    private static class DocumentDbSortRule extends DocumentDbConverterRule {
        static final DocumentDbSortRule INSTANCE = Config.INSTANCE
                .withConversion(Sort.class, Convention.NONE, DocumentDbRel.CONVENTION,
                        "DocumentDbSortRule")
                .withRuleFactory(DocumentDbSortRule::new)
                .toRule(DocumentDbSortRule.class);

        DocumentDbSortRule(final Config config) {
            super(config);
        }

        /** Converts the logical sort into a DocumentDbSort, keeping the collation trait. */
        @Override
        public RelNode convert(final RelNode rel) {
            final Sort sort = (Sort) rel;
            final RelTraitSet traitSet = sort.getTraitSet().replace(out)
                    .replace(sort.getCollation());
            return new DocumentDbSort(rel.getCluster(), traitSet,
                    // The input itself is converted without a collation requirement.
                    convert(sort.getInput(), traitSet.replace(RelCollations.EMPTY)),
                    sort.getCollation(), sort.offset, sort.fetch);
        }
    }

    /**
     * Rule to convert a {@link LogicalFilter} to a
     * {@link DocumentDbFilter}.
     */
    private static class DocumentDbFilterRule extends DocumentDbConverterRule {
        static final DocumentDbFilterRule INSTANCE = Config.INSTANCE
                .withConversion(LogicalFilter.class, Convention.NONE,
                        DocumentDbRel.CONVENTION, "DocumentDbFilterRule")
                .withRuleFactory(DocumentDbFilterRule::new)
                .toRule(DocumentDbFilterRule.class);

        DocumentDbFilterRule(final Config config) {
            super(config);
        }

        @Override
        public RelNode convert(final RelNode rel) {
            final LogicalFilter filter = (LogicalFilter) rel;
            final RelTraitSet traitSet = filter.getTraitSet().replace(out);
            return new DocumentDbFilter(
                    rel.getCluster(),
                    traitSet,
                    convert(filter.getInput(), out),
                    filter.getCondition());
        }
    }

    /**
     * Rule to convert a {@link LogicalProject}
     * to a {@link DocumentDbProject}.
     */
    private static class DocumentDbProjectRule extends DocumentDbConverterRule {
        static final DocumentDbProjectRule INSTANCE = Config.INSTANCE
                .withConversion(LogicalProject.class, Convention.NONE,
                        DocumentDbRel.CONVENTION, "DocumentDbProjectRule")
                .withRuleFactory(DocumentDbProjectRule::new)
                .toRule(DocumentDbProjectRule.class);

        DocumentDbProjectRule(final Config config) {
            super(config);
        }

        @Override
        public RelNode convert(final RelNode rel) {
            final LogicalProject project = (LogicalProject) rel;
            final RelTraitSet traitSet = project.getTraitSet().replace(out);
            return new DocumentDbProject(project.getCluster(), traitSet,
                    convert(project.getInput(), out), project.getProjects(),
                    project.getRowType());
        }
    }

    /**
     * Rule to convert a {@link org.apache.calcite.rel.logical.LogicalJoin} to
     * a {@link DocumentDbJoin}.
     */
    private static class DocumentDbJoinRule extends DocumentDbConverterRule {
        private static final DocumentDbJoinRule INSTANCE = Config.INSTANCE
                .withConversion(LogicalJoin.class, Convention.NONE,
                        DocumentDbRel.CONVENTION, "DocumentDbJoinRule")
                .withRuleFactory(DocumentDbJoinRule::new)
                .toRule(DocumentDbJoinRule.class);

        protected DocumentDbJoinRule(final Config config) {
            super(config);
        }

        @Override
        public RelNode convert(final RelNode rel) {
            final LogicalJoin join = (LogicalJoin) rel;
            final RelTraitSet traitSet = join.getTraitSet().replace(out);
            return new DocumentDbJoin(join.getCluster(), traitSet,
                    convert(join.getLeft(), out),
                    convert(join.getRight(), out),
                    join.getCondition(),
                    join.getJoinType());
        }
    }

    // Legacy commented-out rules inherited from Calcite's MongoRules; retained
    // for reference. Nested end-comment markers were changed to "o/" to keep the
    // outer comment intact.
    /*
    /**
     * Rule to convert a {@link LogicalCalc} to an
     * {@link MongoCalcRel}.
     o/
    private static class MongoCalcRule
        extends DocumentDbConverterRule {
      private MongoCalcRule(MongoConvention out) {
        super(
            LogicalCalc.class,
            Convention.NONE,
            out,
            "MongoCalcRule");
      }

      public RelNode convert(RelNode rel) {
        final LogicalCalc calc = (LogicalCalc) rel;

        // If there's a multiset, let FarragoMultisetSplitter work on it
        // first.
if (RexMultisetUtil.containsMultiset(calc.getProgram())) { return null; } return new MongoCalcRel( rel.getCluster(), rel.getTraitSet().replace(out), convert( calc.getChild(), calc.getTraitSet().replace(out)), calc.getProgram(), Project.Flags.Boxed); } } public static class MongoCalcRel extends SingleRel implements MongoRel { private final RexProgram program; /** * Values defined in {@link org.apache.calcite.rel.core.Project.Flags}. o/ protected int flags; public MongoCalcRel( RelOptCluster cluster, RelTraitSet traitSet, RelNode child, RexProgram program, int flags) { super(cluster, traitSet, child); assert getConvention() instanceof MongoConvention; this.flags = flags; this.program = program; this.rowType = program.getOutputRowType(); } public RelOptPlanWriter explainTerms(RelOptPlanWriter pw) { return program.explainCalc(super.explainTerms(pw)); } public double getRows() { return LogicalFilter.estimateFilteredRows( getChild(), program); } public RelOptCost computeSelfCost(RelOptPlanner planner) { double dRows = RelMetadataQuery.getRowCount(this); double dCpu = RelMetadataQuery.getRowCount(getChild()) * program.getExprCount(); double dIo = 0; return planner.makeCost(dRows, dCpu, dIo); } public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) { return new MongoCalcRel( getCluster(), traitSet, sole(inputs), program.copy(), getFlags()); } public int getFlags() { return flags; } public RexProgram getProgram() { return program; } public SqlString implement(MongoImplementor implementor) { final SqlBuilder buf = new SqlBuilder(implementor.dialect); buf.append("SELECT "); if (isStar(program)) { buf.append("*"); } else { for (Ord<RexLocalRef> ref : Ord.zip(program.getProjectList())) { buf.append(ref.i == 0 ? 
"" : ", "); expr(buf, program, ref.e); alias(buf, null, getRowType().getFieldNames().get(ref.i)); } } implementor.newline(buf) .append("FROM "); implementor.subQuery(buf, 0, getChild(), "t"); if (program.getCondition() != null) { implementor.newline(buf); buf.append("WHERE "); expr(buf, program, program.getCondition()); } return buf.toSqlString(); } private static boolean isStar(RexProgram program) { int i = 0; for (RexLocalRef ref : program.getProjectList()) { if (ref.getIndex() != i++) { return false; } } return i == program.getInputRowType().getFieldCount(); } private static void expr( SqlBuilder buf, RexProgram program, RexNode rex) { if (rex instanceof RexLocalRef) { final int index = ((RexLocalRef) rex).getIndex(); expr(buf, program, program.getExprList().get(index)); } else if (rex instanceof RexInputRef) { buf.identifier( program.getInputRowType().getFieldNames().get( ((RexInputRef) rex).getIndex())); } else if (rex instanceof RexLiteral) { toSql(buf, (RexLiteral) rex); } else if (rex instanceof RexCall) { final RexCall call = (RexCall) rex; switch (call.getOperator().getSyntax()) { case Binary: expr(buf, program, call.getOperands().get(0)); buf.append(' ') .append(call.getOperator().toString()) .append(' '); expr(buf, program, call.getOperands().get(1)); break; default: throw new AssertionError(call.getOperator()); } } else { throw new AssertionError(rex); } } } private static SqlBuilder toSql(SqlBuilder buf, RexLiteral rex) { switch (rex.getTypeName()) { case CHAR: case VARCHAR: return buf.append( new NlsString(rex.getValue2().toString(), null, null) .asSql(false, false)); default: return buf.append(rex.getValue2().toString()); } } */ /** * Rule to convert an {@link LogicalAggregate} * to an {@link DocumentDbAggregate}. 
*/ private static class DocumentDbAggregateRule extends DocumentDbConverterRule { static final DocumentDbAggregateRule INSTANCE = Config.INSTANCE .withConversion(LogicalAggregate.class, Convention.NONE, DocumentDbRel.CONVENTION, "DocumentDbAggregateRule") .withRuleFactory(DocumentDbAggregateRule::new) .toRule(DocumentDbAggregateRule.class); DocumentDbAggregateRule(final Config config) { super(config); } @Override public RelNode convert(final RelNode rel) { final LogicalAggregate agg = (LogicalAggregate) rel; final RelTraitSet traitSet = agg.getTraitSet().replace(out); try { return new DocumentDbAggregate( rel.getCluster(), traitSet, convert(agg.getInput(), traitSet.simplify()), agg.getGroupSet(), agg.getGroupSets(), agg.getAggCallList()); } catch (InvalidRelException e) { LOGGER.warn(e.toString()); return null; } } } /** * Container for operands with optional column metadata. */ @Getter static class Operand { private final String aggregationValue; private final String queryValue; private final boolean isQuerySyntax; private final DocumentDbSchemaColumn column; /** * Constructs an Operand from a String value. Only aggregation value is set. All other values * will be null. * * @param aggregationValue the String value. */ public Operand(final String aggregationValue) { this(aggregationValue, null, false, null); } /** * Constructs an Operand from the given values.The column value is left as null. * * @param aggregationValue the String value. This is an expression using aggregation operators. * @param queryValue This is an expression using query operators. * @param isQuerySyntax Whether or not the operand can be used on the right of query operators. This * includes literals, literals wrapped in CAST, and simple scalar function calls such as * CURRENT TIME. */ public Operand( final String aggregationValue, final String queryValue, final boolean isQuerySyntax) { this(aggregationValue, queryValue, isQuerySyntax, null); } /** * Constructs an Operand from the given values. 
* @param aggregationValue the String value. This is an expression using aggregation operators. * @param queryValue This is an expression using query operators. * @param isQuerySyntax Whether or not the operand can be used on the right of query operators. This includes literals, literals wrapped in CAST, * and simple scalar function calls such as CURRENT TIME. * @param column Column metadata if the operand represents a field reference. */ public Operand( final String aggregationValue, final String queryValue, final boolean isQuerySyntax, final DocumentDbSchemaColumn column) { this.aggregationValue = aggregationValue; this.queryValue = queryValue; this.isQuerySyntax = isQuerySyntax; this.column = column; } /** * Formats the string value using the {@link String#format(String, Object...)} method. * * @param format the format string. * @param args the optional arguments. * @return the Operand with the value formatted. */ public static Operand format(final String format, final Object... args) { return new Operand(String.format(format, args)); } /** * Checks if operand represents a field reference. This includes RexInputRef as well as * field references wrapped in calls to CAST or REINTERPRET. */ public boolean isInputRef() { return this.column != null; } @Override public String toString() { return aggregationValue; } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o instanceof String) { final String stringValue = (String) o; return stringValue.equals(this.getAggregationValue()); } if (!(o instanceof Operand)) { return false; } final Operand that = (Operand) o; return Objects.equals(getAggregationValue(), that.getAggregationValue()); } @Override public int hashCode() { return Objects.hash(getAggregationValue()); } } /* /** * Rule to convert an {@link org.apache.calcite.rel.logical.Union} to a * {@link MongoUnionRel}. 
o/ private static class MongoUnionRule extends DocumentDbConverterRule { private MongoUnionRule(MongoConvention out) { super( Union.class, Convention.NONE, out, "MongoUnionRule"); } public RelNode convert(RelNode rel) { final Union union = (Union) rel; final RelTraitSet traitSet = union.getTraitSet().replace(out); return new MongoUnionRel( rel.getCluster(), traitSet, convertList(union.getInputs(), traitSet), union.all); } } public static class MongoUnionRel extends Union implements MongoRel { public MongoUnionRel( RelOptCluster cluster, RelTraitSet traitSet, List<RelNode> inputs, boolean all) { super(cluster, traitSet, inputs, all); } public MongoUnionRel copy( RelTraitSet traitSet, List<RelNode> inputs, boolean all) { return new MongoUnionRel(getCluster(), traitSet, inputs, all); } @Override public RelOptCost computeSelfCost(RelOptPlanner planner) { return super.computeSelfCost(planner).multiplyBy(.1); } public SqlString implement(MongoImplementor implementor) { return setOpSql(this, implementor, "UNION"); } } private static SqlString setOpSql( SetOp setOpRel, MongoImplementor implementor, String op) { final SqlBuilder buf = new SqlBuilder(implementor.dialect); for (Ord<RelNode> input : Ord.zip(setOpRel.getInputs())) { if (input.i > 0) { implementor.newline(buf) .append(op + (setOpRel.all ? " ALL " : "")); implementor.newline(buf); } buf.append(implementor.visitChild(input.i, input.e)); } return buf.toSqlString(); } /** * Rule to convert an {@link org.apache.calcite.rel.logical.LogicalIntersect} * to an {@link MongoIntersectRel}. 
o/ private static class MongoIntersectRule extends DocumentDbConverterRule { private MongoIntersectRule(MongoConvention out) { super( LogicalIntersect.class, Convention.NONE, out, "MongoIntersectRule"); } public RelNode convert(RelNode rel) { final LogicalIntersect intersect = (LogicalIntersect) rel; if (intersect.all) { return null; // INTERSECT ALL not implemented } final RelTraitSet traitSet = intersect.getTraitSet().replace(out); return new MongoIntersectRel( rel.getCluster(), traitSet, convertList(intersect.getInputs(), traitSet), intersect.all); } } public static class MongoIntersectRel extends Intersect implements MongoRel { public MongoIntersectRel( RelOptCluster cluster, RelTraitSet traitSet, List<RelNode> inputs, boolean all) { super(cluster, traitSet, inputs, all); assert !all; } public MongoIntersectRel copy( RelTraitSet traitSet, List<RelNode> inputs, boolean all) { return new MongoIntersectRel(getCluster(), traitSet, inputs, all); } public SqlString implement(MongoImplementor implementor) { return setOpSql(this, implementor, " intersect "); } } /** * Rule to convert an {@link org.apache.calcite.rel.logical.LogicalMinus} * to an {@link MongoMinusRel}. 
o/ private static class MongoMinusRule extends DocumentDbConverterRule { private MongoMinusRule(MongoConvention out) { super( LogicalMinus.class, Convention.NONE, out, "MongoMinusRule"); } public RelNode convert(RelNode rel) { final LogicalMinus minus = (LogicalMinus) rel; if (minus.all) { return null; // EXCEPT ALL not implemented } final RelTraitSet traitSet = rel.getTraitSet().replace(out); return new MongoMinusRel( rel.getCluster(), traitSet, convertList(minus.getInputs(), traitSet), minus.all); } } public static class MongoMinusRel extends Minus implements MongoRel { public MongoMinusRel( RelOptCluster cluster, RelTraitSet traitSet, List<RelNode> inputs, boolean all) { super(cluster, traitSet, inputs, all); assert !all; } public MongoMinusRel copy( RelTraitSet traitSet, List<RelNode> inputs, boolean all) { return new MongoMinusRel(getCluster(), traitSet, inputs, all); } public SqlString implement(MongoImplementor implementor) { return setOpSql(this, implementor, " minus "); } } public static class MongoValuesRule extends DocumentDbConverterRule { private MongoValuesRule(MongoConvention out) { super( LogicalValues.class, Convention.NONE, out, "MongoValuesRule"); } @Override public RelNode convert(RelNode rel) { LogicalValues valuesRel = (LogicalValues) rel; return new MongoValuesRel( valuesRel.getCluster(), valuesRel.getRowType(), valuesRel.getTuples(), valuesRel.getTraitSet().plus(out)); } } public static class MongoValuesRel extends Values implements MongoRel { MongoValuesRel( RelOptCluster cluster, RelDataType rowType, List<List<RexLiteral>> tuples, RelTraitSet traitSet) { super(cluster, rowType, tuples, traitSet); } @Override public RelNode copy( RelTraitSet traitSet, List<RelNode> inputs) { assert inputs.isEmpty(); return new MongoValuesRel( getCluster(), rowType, tuples, traitSet); } public SqlString implement(MongoImplementor implementor) { throw new AssertionError(); // TODO: } } */ }
4,586
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbTableScan.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package software.amazon.documentdb.jdbc.calcite.adapter; import com.google.common.collect.ImmutableList; import com.mongodb.client.model.Aggregates; import com.mongodb.client.model.UnwindOptions; import org.apache.calcite.adapter.enumerable.EnumerableRules; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptCost; import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.rules.CoreRules; import org.apache.calcite.rel.type.RelDataType; import org.checkerframework.checker.nullness.qual.Nullable; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaColumn; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable; import java.util.List; import java.util.Map.Entry; /** * Relational expression representing a scan of a MongoDB collection. 
 *
 * <p> Additional operations might be applied,
 * using the "find" or "aggregate" methods.</p>
 */
public class DocumentDbTableScan extends TableScan implements DocumentDbRel {
    // Backing MongoDB table implementation handed to the implementor.
    private final DocumentDbTable mongoTable;
    // Narrowed row type when only some fields are projected; null means raw row.
    private final RelDataType projectRowType;
    // SQL-to-document schema mapping for this table (column map, field paths).
    private final DocumentDbSchemaTable metadataTable;

    /**
     * Creates a DocumentDbTableScan.
     *
     * @param cluster        Cluster
     * @param traitSet       Traits
     * @param table          Table
     * @param mongoTable     MongoDB table
     * @param projectRowType Fields and types to project; null to project raw row
     * @param metadataTable  Schema metadata describing the table's columns
     */
    protected DocumentDbTableScan(final RelOptCluster cluster, final RelTraitSet traitSet,
            final RelOptTable table, final DocumentDbTable mongoTable,
            final RelDataType projectRowType,
            final DocumentDbSchemaTable metadataTable) {
        super(cluster, traitSet, ImmutableList.of(), table);
        this.mongoTable = mongoTable;
        this.projectRowType = projectRowType;
        this.metadataTable = metadataTable;

        assert mongoTable != null;
        assert getConvention() == CONVENTION;
    }

    /** A table scan has no inputs, so copying returns this node unchanged. */
    @Override
    public RelNode copy(final RelTraitSet traitSet, final List<RelNode> inputs) {
        assert inputs.isEmpty();
        return this;
    }

    /** Returns the projected row type when one was supplied, else the table's full row type. */
    @Override
    public RelDataType deriveRowType() {
        return projectRowType != null ? projectRowType : super.deriveRowType();
    }

    /**
     * Computes the cost of this scan; discounted so the planner prefers pushed-down scans,
     * with a further discount proportional to the (smaller) project list.
     */
    @Override
    public @Nullable RelOptCost computeSelfCost(final RelOptPlanner planner,
            final RelMetadataQuery mq) {
        // scans with a small project list are cheaper
        final float f = projectRowType == null ? 1f
                : (float) projectRowType.getFieldCount() / 100f;
        final RelOptCost relOptCost = super.computeSelfCost(planner, mq);
        return relOptCost != null ? relOptCost.multiplyBy(.1 * f) : null;
    }

    /**
     * Registers the DocumentDB push-down rules and removes built-in rules that would
     * otherwise compete with or break the pushed-down plan.
     */
    @Override
    public void register(final RelOptPlanner planner) {
        planner.addRule(DocumentDbToEnumerableConverterRule.INSTANCE);
        for (RelOptRule rule : DocumentDbRules.RULES) {
            planner.addRule(rule);
        }
        // Keep the project node even for SELECT * queries.
        planner.removeRule(CoreRules.PROJECT_REMOVE);
        // Remove extra $limit on joins.
        planner.removeRule(CoreRules.SORT_JOIN_TRANSPOSE);
        // Remove enumerable rules to ensure we always do push-down instead regardless of cost.
        planner.removeRule(EnumerableRules.ENUMERABLE_AGGREGATE_RULE);
        planner.removeRule(EnumerableRules.ENUMERABLE_PROJECT_RULE);
        planner.removeRule(EnumerableRules.ENUMERABLE_JOIN_RULE);
        planner.removeRule(EnumerableRules.ENUMERABLE_LIMIT_RULE);
        planner.removeRule(EnumerableRules.ENUMERABLE_SORT_RULE);
        planner.removeRule(EnumerableRules.ENUMERABLE_FILTER_RULE);
    }

    /**
     * Seeds the implementor with this scan's table and metadata, adds one $unwind
     * stage per embedded-array index column, and attaches a virtual-table filter
     * when the table is virtual.
     */
    @Override
    public void implement(final Implementor implementor) {
        implementor.setTable(table);
        implementor.setDocumentDbTable(mongoTable);
        implementor.setMetadataTable(metadataTable);

        // Add an unwind operation for each embedded array to convert to separate rows.
        // Assumes that all queries will use aggregate and not find.
        // Assumes that outermost arrays are added to the list first so pipeline executes correctly.
        for (Entry<String, DocumentDbSchemaColumn> column : metadataTable.getColumnMap().entrySet()) {
            if (column.getValue().isIndex()) {
                final String indexName = column.getKey();
                final UnwindOptions opts = new UnwindOptions();
                String arrayPath = column.getValue().getFieldPath();
                arrayPath = "$" + arrayPath;
                // Expose the array position as a field and keep rows whose array is null/empty.
                opts.includeArrayIndex(indexName);
                opts.preserveNullAndEmptyArrays(true);
                // NOTE(review): relies on the Bson stage's toString() rendering — confirm the
                // downstream consumer of addUnwind parses this representation.
                implementor.addUnwind(String.valueOf(Aggregates.unwind(arrayPath, opts)));
            }
        }

        // Filter out any rows for which the table does not exist.
        final String matchFilter = DocumentDbJoin
                .buildFieldsExistMatchFilter(DocumentDbJoin.getFilterColumns(metadataTable));
        if (matchFilter != null && DocumentDbJoin.isTableVirtual(metadataTable)) {
            implementor.setVirtualTableFilter(matchFilter);
        }
    }
}
4,587
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbToEnumerableConverter.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package software.amazon.documentdb.jdbc.calcite.adapter; import org.apache.calcite.adapter.enumerable.EnumerableRel; import org.apache.calcite.adapter.enumerable.EnumerableRelImplementor; import org.apache.calcite.adapter.enumerable.JavaRowFormat; import org.apache.calcite.adapter.enumerable.PhysType; import org.apache.calcite.adapter.enumerable.PhysTypeImpl; import org.apache.calcite.config.CalciteSystemProperty; import org.apache.calcite.linq4j.tree.BlockBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.apache.calcite.linq4j.tree.MethodCallExpression; import org.apache.calcite.plan.ConventionTraitDef; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptCost; import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.convert.ConverterImpl; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.runtime.Hook; import org.apache.calcite.util.BuiltInMethod; import org.apache.calcite.util.Pair; import org.apache.calcite.util.Util; 
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbRel.Implementor;

import java.util.AbstractList;
import java.util.ArrayList;
import java.util.List;

/**
 * Relational expression representing a scan of a table in a Mongo data source.
 */
public class DocumentDbToEnumerableConverter
        extends ConverterImpl
        implements EnumerableRel {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(DocumentDbToEnumerableConverter.class);

    protected DocumentDbToEnumerableConverter(
            final RelOptCluster cluster,
            final RelTraitSet traits,
            final RelNode input) {
        super(cluster, ConventionTraitDef.INSTANCE, traits, input);
    }

    @Override
    public RelNode copy(final RelTraitSet traitSet, final List<RelNode> inputs) {
        return new DocumentDbToEnumerableConverter(
                getCluster(), traitSet, sole(inputs));
    }

    /** Cost is scaled by ENUMERABLE_COST_FACTOR so push-down plans are preferred. */
    @Override
    public @Nullable RelOptCost computeSelfCost(final RelOptPlanner planner,
            final RelMetadataQuery mq) {
        final RelOptCost relOptCost = super.computeSelfCost(planner, mq);
        return relOptCost != null
                ? relOptCost.multiplyBy(DocumentDbRules.ENUMERABLE_COST_FACTOR)
                : null;
    }

    /**
     * Builds the generated-code block that calls the DocumentDB queryable's
     * "aggregate" method with the field list, field paths, and pipeline operations
     * collected from the MongoDB relational subtree.
     */
    @Override
    public Result implement(final EnumerableRelImplementor implementor, final Prefer pref) {
        // Generates a call to "find" or "aggregate", depending upon whether
        // an aggregate is present.
        //
        //   ((MongoTable) schema.getTable("zips")).find(
        //     "{state: 'CA'}",
        //     "{city: 1, zipcode: 1}")
        //
        //   ((MongoTable) schema.getTable("zips")).aggregate(
        //     "{$filter: {state: 'CA'}}",
        //     "{$group: {_id: '$city', c: {$sum: 1}, p: {$sum: "$pop"}}")
        final BlockBuilder list = new BlockBuilder();
        // Walk the MONGO-convention subtree to collect the pipeline stages and metadata.
        final DocumentDbRel.Implementor mongoImplementor =
                new DocumentDbRel.Implementor(getCluster().getRexBuilder());
        mongoImplementor.visitChild(0, getInput());
        final RelDataType rowType = getRowType();
        final PhysType physType =
                PhysTypeImpl.of(
                        implementor.getTypeFactory(), rowType,
                        pref.prefer(JavaRowFormat.ARRAY));
        // (field name, field class) pairs for each output column.
        final Expression fields =
                list.append("fields",
                        constantArrayList(
                                Pair.zip(DocumentDbRules.mongoFieldNames(rowType,
                                        mongoImplementor.getMetadataTable()),
                                        new AbstractList<Class>() {
                                            @Override
                                            public Class get(final int index) {
                                                return physType.fieldClass(index);
                                            }

                                            @Override
                                            public int size() {
                                                return rowType.getFieldCount();
                                            }
                                        }),
                                Pair.class));
        // Document field paths matching the output columns.
        final Expression paths =
                list.append("paths",
                        constantArrayList(
                                DocumentDbRules.mongoFieldNames(rowType,
                                        mongoImplementor.getMetadataTable()),
                                String.class));
        final Expression table =
                list.append("table",
                        mongoImplementor.getTable().getExpression(
                                DocumentDbTable.DocumentDbQueryable.class));
        // DocumentDB: modified - start
        // Prepend the virtual-table stages (unwinds, collision resolutions, null filter).
        handleVirtualTable(mongoImplementor);
        // DocumentDB: modified - end
        // Only the aggregate-op side of each (find, aggregate) pair is used.
        final List<String> opList = Pair.right(mongoImplementor.getList());
        final Expression ops =
                list.append("ops",
                        constantArrayList(opList, String.class));
        final Expression enumerable =
                list.append("enumerable",
                        Expressions.call(table,
                                DocumentDbMethod.MONGO_QUERYABLE_AGGREGATE.getMethod(),
                                fields, paths, ops));
        if (CalciteSystemProperty.DEBUG.value()) {
            LOGGER.info("opList: {}", opList);
        }
        Hook.QUERY_PLAN.run(opList);
        list.add(
                Expressions.return_(null, enumerable));
        return implementor.result(physType, list.toBlock());
    }

    /** E.g. {@code constantArrayList("x", "y")} returns
     * "Arrays.asList('x', 'y')".
     *
     * @param values List of values
     * @param clazz Type of values
     * @return expression
     */
    private static <T> MethodCallExpression constantArrayList(final List<T> values,
            final Class clazz) {
        return Expressions.call(
                BuiltInMethod.ARRAYS_AS_LIST.method,
                Expressions.newArrayInit(clazz, constantList(values)));
    }

    /** E.g. {@code constantList("x", "y")} returns
     * {@code {ConstantExpression("x"), ConstantExpression("y")}}. */
    private static <T> List<Expression> constantList(final List<T> values) {
        return Util.transform(values, Expressions::constant);
    }

    /**
     * Adds aggregation stage to handle virtual tables.
     * @param implementor the implementor.
     */
    public static void handleVirtualTable(final Implementor implementor) {
        final List<Pair<String, String>> stages = new ArrayList<>();

        // Add the column resolutions and any unwinds.
        // Order depends on whether the resolution relies on any unwound columns.
        if (implementor.isResolutionNeedsUnwind()) {
            implementor.getUnwinds().forEach(op -> stages.add(Pair.of(null, op)));
            implementor.getCollisionResolutions().forEach(op -> stages.add(Pair.of(null, op)));
        } else {
            implementor.getCollisionResolutions().forEach(op -> stages.add(Pair.of(null, op)));
            implementor.getUnwinds().forEach(op -> stages.add(Pair.of(null, op)));
        }

        // Add filter to remove purely null rows. Skipped if any joins were done beforehand.
        if (!implementor.isNullFiltered() && implementor.getVirtualTableFilter() != null) {
            stages.add(Pair.of(null, implementor.getVirtualTableFilter()));
        }
        implementor.setNullFiltered(true);
        // Existing stages run after the virtual-table preamble.
        stages.addAll(implementor.getList());
        implementor.setList(stages);
    }
}
4,588
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbRel.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package software.amazon.documentdb.jdbc.calcite.adapter; import org.apache.calcite.plan.Convention; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.util.Pair; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable; import java.time.Instant; import java.util.ArrayList; import java.util.List; /** * Relational expression that uses Mongo calling convention. */ public interface DocumentDbRel extends RelNode { /** * Implements the implementor. * @param implementor the implementor to implement */ void implement(Implementor implementor); /** Calling convention for relational operations that occur in MongoDB. */ Convention CONVENTION = new Convention.Impl("MONGO", DocumentDbRel.class); /** Callback for the implementation process that converts a tree of * {@link DocumentDbRel} nodes into a MongoDB query. 
 */
    class Implementor {
        // DocumentDB: modified - start
        // Pipeline stages accumulated so far, as (findOp, aggregateOp) pairs; see add().
        private List<Pair<String, String>> list = new ArrayList<>();
        private final RexBuilder rexBuilder;
        private RelOptTable table;
        // Schema metadata for the current table (column map, field paths).
        private DocumentDbSchemaTable metadataTable;
        private DocumentDbTable documentDbTable;
        // $unwind stages registered via addUnwind (e.g. for embedded-array columns).
        private final List<String> unwinds = new ArrayList<>();
        // Stages resolving field-name collisions, registered via addCollisionResolution.
        private final List<String> collisionResolutions = new ArrayList<>();
        // Optional stage filtering out rows of a virtual table; null when not needed.
        private String virtualTableFilter;
        // True once the null-row filter has been applied or deliberately skipped.
        private boolean nullFiltered = false;
        // True while implementing a join subtree; saved/restored by visitChild.
        private boolean join = false;
        // True when collisionResolutions must run after the unwinds rather than before.
        private boolean resolutionNeedsUnwind = false;
        // Snapshot of "now" taken at construction so the whole query sees one timestamp.
        private final Instant currentTime = Instant.now();
        // DocumentDB: modified - end

        public List<Pair<String, String>> getList() {
            return list;
        }

        public void setList(final List<Pair<String, String>> list) {
            this.list = list;
        }

        public RexBuilder getRexBuilder() {
            return rexBuilder;
        }

        public RelOptTable getTable() {
            return table;
        }

        public void setTable(final RelOptTable table) {
            this.table = table;
        }

        // DocumentDB: modified - start
        public void setDocumentDbTable(final DocumentDbTable table) {
            this.documentDbTable = table;
        }

        public DocumentDbTable getDocumentDbTable() {
            return documentDbTable;
        }

        public DocumentDbSchemaTable getMetadataTable() {
            return metadataTable;
        }

        public void setMetadataTable(final DocumentDbSchemaTable metadataTable) {
            this.metadataTable = metadataTable;
        }

        public Implementor(final RexBuilder rexBuilder) {
            this.rexBuilder = rexBuilder;
        }

        // Appends a pipeline stage as a (find op, aggregate op) pair.
        public void add(final String findOp, final String aggOp) {
            list.add(Pair.of(findOp, aggOp));
        }

        // Inserts a pipeline stage pair at a specific position.
        public void add(final int index, final String findOp, final String aggOp) {
            list.add(index, Pair.of(findOp, aggOp));
        }

        public void addUnwind(final String op) {
            unwinds.add(op);
        }

        public List<String> getUnwinds() {
            return unwinds;
        }

        public void setVirtualTableFilter(final String op) {
            this.virtualTableFilter = op;
        }

        public String getVirtualTableFilter() {
            return virtualTableFilter;
        }

        public void addCollisionResolution(final String op) {
            collisionResolutions.add(op);
        }

        public List<String> getCollisionResolutions() {
            return collisionResolutions;
        }

        public void setResolutionNeedsUnwind(final boolean resolutionNeedsUnwind) {
            this.resolutionNeedsUnwind = resolutionNeedsUnwind;
        }

        public boolean isResolutionNeedsUnwind() {
            return resolutionNeedsUnwind;
        }

        public boolean isNullFiltered() {
            return nullFiltered;
        }

        public void setNullFiltered(final boolean nullFiltered) {
            this.nullFiltered = nullFiltered;
        }

        public boolean isJoin() {
            return join;
        }

        public void setJoin(final boolean join) {
            this.join = join;
        }
        // DocumentDB: modified - end

        // Implements the (single) child node, preserving this node's join flag
        // across the child's implementation.
        public void visitChild(final int ordinal, final RelNode input) {
            assert ordinal == 0;
            final boolean isJoin = isJoin();
            ((DocumentDbRel) input).implement(this);
            setJoin(isJoin);
        }

        public Instant getCurrentTime() {
            return currentTime;
        }
    }
}
4,589
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbEnumerator.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package software.amazon.documentdb.jdbc.calcite.adapter;

import org.apache.calcite.linq4j.Enumerator;

/** Implements the enumerator interface but does not return data. */
class DocumentDbEnumerator implements Enumerator<Object> {

    /** Creates a DocumentDbEnumerator. */
    DocumentDbEnumerator() {
    }

    /** Always reports the end of the (empty) sequence. */
    @Override
    public boolean moveNext() {
        return false;
    }

    /** There is never a current element; yields null. */
    @Override
    public Object current() {
        return null;
    }

    /** Resetting is not supported by this enumerator. */
    @Override
    public void reset() {
        throw new UnsupportedOperationException();
    }

    /** Nothing to release. */
    @Override
    public void close() {
    }
}
4,590
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbProject.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package software.amazon.documentdb.jdbc.calcite.adapter; import com.google.common.collect.ImmutableList; import org.apache.calcite.adapter.java.JavaTypeFactory; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptCost; import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.Project; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.util.Pair; import org.apache.calcite.util.Util; import org.checkerframework.checker.nullness.qual.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbRules.Operand; import software.amazon.documentdb.jdbc.metadata.DocumentDbMetadataColumn; import software.amazon.documentdb.jdbc.metadata.DocumentDbMetadataTable; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaColumn; import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable; import 
java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

/**
 * Implementation of {@link Project}
 * relational expression in MongoDB.
 */
public class DocumentDbProject extends Project implements DocumentDbRel {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(DocumentDbProject.class.getName());
    // MongoDB's document identifier field; excluded from $project when not selected.
    private static final String ID_FIELD = "_id";

    /**
     * Creates a new {@link DocumentDbProject}
     * @param cluster the cluster.
     * @param traitSet the trait set.
     * @param input the input.
     * @param projects the projections
     * @param rowType the raw relational type.
     */
    public DocumentDbProject(final RelOptCluster cluster, final RelTraitSet traitSet,
            final RelNode input, final List<? extends RexNode> projects,
            final RelDataType rowType) {
        super(cluster, traitSet, ImmutableList.of(), input, projects, rowType);
        assert getConvention() == CONVENTION;
        assert getConvention() == input.getConvention();
    }

    /**
     * DEPRECATED - Creates a new {@link DocumentDbProject}
     * @param cluster the cluster.
     * @param traitSet the trait set.
     * @param input the input.
     * @param projects the projections.
     * @param rowType the row type.
     * @param flags the flags.
     */
    @Deprecated // to be removed before 2.0
    public DocumentDbProject(final RelOptCluster cluster, final RelTraitSet traitSet,
            final RelNode input, final List<RexNode> projects, final RelDataType rowType,
            final int flags) {
        this(cluster, traitSet, input, projects, rowType);
        Util.discard(flags);
    }

    @Override
    public Project copy(final RelTraitSet traitSet, final RelNode input,
            final List<RexNode> projects, final RelDataType rowType) {
        return new DocumentDbProject(getCluster(), traitSet, input, projects, rowType);
    }

    /** Cost is scaled by PROJECT_COST_FACTOR so pushed-down projects are preferred. */
    @Override
    public @Nullable RelOptCost computeSelfCost(final RelOptPlanner planner,
            final RelMetadataQuery mq) {
        final RelOptCost relOptCost = super.computeSelfCost(planner, mq);
        return relOptCost != null
                ? relOptCost.multiplyBy(DocumentDbRules.PROJECT_COST_FACTOR)
                : null;
    }

    /**
     * Translates this projection into a $project (or $addFields when joining or when the
     * project list exceeds MAX_PROJECT_FIELDS) pipeline stage, and rewrites the metadata
     * table's column map so downstream operators see the projected names and paths.
     */
    @Override
    public void implement(final Implementor implementor) {
        implementor.visitChild(0, getInput());
        // DocumentDB: modified - start
        // Re-walk the input with a scratch implementor to get the input's metadata table
        // without disturbing the state accumulated in `implementor`.
        final Implementor mongoImplementor = new Implementor(implementor.getRexBuilder());
        mongoImplementor.visitChild(0, getInput());
        final List<String> inNames = getInput().getRowType().getFieldNames();
        final DocumentDbRules.RexToMongoTranslator translator =
                new DocumentDbRules.RexToMongoTranslator(
                        (JavaTypeFactory) getCluster().getTypeFactory(),
                        DocumentDbRules.mongoFieldNames(
                                getInput().getRowType(),
                                mongoImplementor.getMetadataTable()),
                        inNames,
                        mongoImplementor.getMetadataTable(),
                        implementor.getCurrentTime());
        // Field specifications to emit in the pipeline stage ("name: expr" fragments).
        final List<String> items = new ArrayList<>();
        // Working copy of the column map; entries are re-keyed to the output names.
        final LinkedHashMap<String, DocumentDbSchemaColumn> columnMap =
                new LinkedHashMap<>(implementor.getMetadataTable().getColumnMap());
        for (Pair<RexNode, String> pair : getNamedProjects()) {
            final String outName = DocumentDbRules.getNormalizedIdentifier(pair.right);
            // Expand SARG/SEARCH nodes before translation.
            final RexNode expandedNode = RexUtil.expandSearch(
                    implementor.getRexBuilder(), null, pair.left);
            final Operand expr = expandedNode.accept(translator);
            // Check if we are projecting an existing field or generating a new expression.
            if (pair.left instanceof RexInputRef) {
                final RexInputRef ref = (RexInputRef) pair.left;
                final String inName = inNames.get(ref.getIndex());
                final DocumentDbSchemaColumn oldColumn =
                        implementor.getMetadataTable().getColumnMap().get(inName);
                columnMap.remove(inName);
                if (implementor.isJoin()
                        || getRowType().getFieldList().size()
                                > DocumentDbRules.MAX_PROJECT_FIELDS) {
                    // If doing a join or project list is too large (greater than max),
                    // replace the metadata entry but do not project the underlying data.
                    // Path stays the same.
                    columnMap.put(outName, oldColumn);
                } else {
                    // If not joining, replace the metadata entry and project. Path is updated.
                    final DocumentDbMetadataColumn newColumn = DocumentDbMetadataColumn.builder()
                            .fieldPath(oldColumn.getFieldPath())
                            .sqlName(oldColumn.getSqlName())
                            .sqlType(oldColumn.getSqlType())
                            .dbType(oldColumn.getDbType())
                            .isIndex(oldColumn.isIndex())
                            .isPrimaryKey(oldColumn.isPrimaryKey())
                            .foreignKeyTableName(oldColumn.getForeignKeyTableName())
                            .foreignKeyColumnName(oldColumn.getForeignKeyColumnName())
                            .resolvedPath(outName)
                            .build();
                    columnMap.put(outName, newColumn);
                    items.add(DocumentDbRules.maybeQuote(outName) + ": " + expr);
                }
            } else {
                // Computed expression: project it and record it as a generated column.
                items.add(DocumentDbRules.maybeQuote(outName) + ": " + expr);
                columnMap.put(outName, DocumentDbMetadataColumn.builder()
                        .isGenerated(true)
                        .fieldPath(outName)
                        .sqlName(outName)
                        .build());
            }
        }
        if (!items.isEmpty()) {
            // If we are doing a join, we want to preserve all fields. Use $addFields only.
            // Else, use $project.
            final String stageString;
            if (implementor.isJoin()
                    || getRowType().getFieldList().size()
                            > DocumentDbRules.MAX_PROJECT_FIELDS) {
                stageString = "$addFields";
            } else {
                stageString = "$project";
                // Explicitly remove _id field to reduce document size if it is not in output.
                if (!getRowType().getFieldNames().contains(ID_FIELD)) {
                    items.add(ID_FIELD + ": 0");
                }
            }
            final String findString = Util.toString(items, "{", ", ", "}");
            final String aggregateString = "{" + stageString + ": " + findString + "}";
            final Pair<String, String> op = Pair.of(findString, aggregateString);
            implementor.add(op.left, op.right);
        }
        LOGGER.info("Created projection stages of pipeline.");
        LOGGER.debug("Pipeline stages added: {}",
                implementor.getList().stream()
                        .map(c -> c.right)
                        .toArray());
        // Set the metadata table with the updated column map.
        final DocumentDbSchemaTable metadata = DocumentDbMetadataTable.builder()
                .sqlName(implementor.getMetadataTable().getSqlName())
                .collectionName(implementor.getMetadataTable().getCollectionName())
                .columns(columnMap)
                .build();
        implementor.setMetadataTable(metadata);
        implementor.setDocumentDbTable(
                new DocumentDbTable(implementor.getDocumentDbTable().getCollectionName(),
                        metadata));
        // DocumentDB: modified - end
    }
}
4,591
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbToEnumerableConverterRule.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package software.amazon.documentdb.jdbc.calcite.adapter; import org.apache.calcite.adapter.enumerable.EnumerableConvention; import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.convert.ConverterRule; /** * Rule to convert a relational expression from * {@link DocumentDbRel#CONVENTION} to {@link EnumerableConvention}. */ public class DocumentDbToEnumerableConverterRule extends ConverterRule { /** Singleton instance of MongoToEnumerableConverterRule. */ public static final ConverterRule INSTANCE = Config.INSTANCE .withConversion(RelNode.class, DocumentDbRel.CONVENTION, EnumerableConvention.INSTANCE, "MongoToEnumerableConverterRule") .withRuleFactory(DocumentDbToEnumerableConverterRule::new) .toRule(DocumentDbToEnumerableConverterRule.class); /** Called from the Config. */ protected DocumentDbToEnumerableConverterRule(final Config config) { super(config); } @Override public RelNode convert(final RelNode rel) { final RelTraitSet newTraitSet = rel.getTraitSet().replace(getOutConvention()); return new DocumentDbToEnumerableConverter(rel.getCluster(), newTraitSet, rel); } }
4,592
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbFilter.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package software.amazon.documentdb.jdbc.calcite.adapter;

import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.util.Util;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbRules.Operand;
import java.util.ArrayList;
import java.util.List;

/**
 * Implementation of a {@link Filter}
 * relational expression in MongoDB.
 *
 * <p>Translates a filter condition into either a single {@code $match} stage
 * or, when aggregation-operator syntax is required, a three-stage sequence
 * that computes the condition into a placeholder boolean field, matches on
 * it, and then removes it.
 */
public class DocumentDbFilter extends Filter implements DocumentDbRel {

    /**
     * This is a placeholder field to contain the output of the boolean expression in a
     * where clause.
     */
    public static final String BOOLEAN_FLAG_FIELD = "\"placeholderField1F84EB1G3K47\"";
    private static final Logger LOGGER =
            LoggerFactory.getLogger(DocumentDbFilter.class.getName());

    /**
     * Creates a new {@link DocumentDbFilter}
     *
     * @param cluster the relational option cluster.
     * @param traitSet the trait set.
     * @param child the child.
     * @param condition the condition.
     */
    public DocumentDbFilter(
            final RelOptCluster cluster,
            final RelTraitSet traitSet,
            final RelNode child,
            final RexNode condition) {
        super(cluster, traitSet, child, condition);
        assert getConvention() == CONVENTION;
        assert getConvention() == child.getConvention();
    }

    /**
     * Scales the default filter cost down so the planner prefers pushing
     * filters into the MongoDB pipeline.
     */
    @Override
    public @Nullable RelOptCost computeSelfCost(final RelOptPlanner planner,
            final RelMetadataQuery mq) {
        final RelOptCost relOptCost = super.computeSelfCost(planner, mq);
        return relOptCost != null
                ? relOptCost.multiplyBy(DocumentDbRules.FILTER_COST_FACTOR)
                : null;
    }

    @Override
    public DocumentDbFilter copy(final RelTraitSet traitSet, final RelNode input,
            final RexNode condition) {
        return new DocumentDbFilter(getCluster(), traitSet, input, condition);
    }

    /**
     * Translates this filter into aggregation-pipeline stage(s) and adds them
     * to the implementor.
     *
     * @param implementor the {@link DocumentDbRel} implementor accumulating pipeline stages.
     */
    @Override
    public void implement(final Implementor implementor) {
        implementor.visitChild(0, getInput());
        // DocumentDB: modified - start
        // A second implementor is walked over the same input to obtain the
        // input's metadata table for field-name translation.
        final Implementor mongoImplementor = new Implementor(implementor.getRexBuilder());
        mongoImplementor.visitChild(0, getInput());
        final DocumentDbRules.RexToMongoTranslator rexToMongoTranslator =
                new DocumentDbRules.RexToMongoTranslator(
                        (JavaTypeFactory) getCluster().getTypeFactory(),
                        DocumentDbRules.mongoFieldNames(
                                getInput().getRowType(),
                                mongoImplementor.getMetadataTable()),
                        getInput().getRowType().getFieldNames(),
                        mongoImplementor.getMetadataTable(),
                        implementor.getCurrentTime());
        // Expand SARG-based conditions from Calcite's simplifier before translation.
        final RexNode expandedCondition =
                RexUtil.expandSearch(implementor.getRexBuilder(), null, condition);
        final Operand match = expandedCondition.accept(rexToMongoTranslator);
        // Use a single match stage if no aggregation operators are needed.
        // Else, use $addFields or $project to match on placeholder boolean field. This adds 3 stages.
        if (match.getQueryValue() != null) {
            // Named 'matchCondition' (not 'condition') to avoid shadowing the
            // inherited Filter#condition field used above.
            final String matchCondition = match.isInputRef()
                    ? "{" + match.getQueryValue() + ": true}"
                    : match.getQueryValue();
            implementor.add(null, "{\"$match\": " + matchCondition + "}");
        } else {
            addAggregateOperatorStages(implementor, match);
        }
        LOGGER.info("Created filter stages of pipeline.");
        LOGGER.debug("Pipeline stages added: {}",
                implementor.getList().stream()
                        .map(c -> c.right)
                        .toArray());
    }

    /**
     * Adds 3 stages to the pipeline: $addFields/$project to add a boolean
     * field representing the filter condition using aggregate operator syntax,
     * $match to match on the added field, and $project to remove the added field.
     * @param implementor the DocumentDbRel implementor
     * @param match the Operand for the filter condition
     */
    private void addAggregateOperatorStages(final Implementor implementor, final Operand match) {
        if (implementor.isJoin()
                || getRowType().getFieldList().size() >= DocumentDbRules.MAX_PROJECT_FIELDS) {
            // If joining or the project list is too large (already at max),
            // only add the placeholder field to the documents.
            implementor.add(null,
                    "{\"$addFields\": {" + BOOLEAN_FLAG_FIELD + ": " + match + "}}");
        } else {
            // Else, project all current project items + the placeholder boolean field.
            final List<String> projectItems = new ArrayList<>();
            for (String projectItem
                    : DocumentDbRules.mongoFieldNames(getRowType(), implementor.getMetadataTable())) {
                projectItems.add(DocumentDbRules.maybeQuote(projectItem) + ": 1");
            }
            projectItems.add(BOOLEAN_FLAG_FIELD + ": " + match);
            implementor.add(null,
                    "{\"$project\": " + Util.toString(projectItems, "{", ", ", "}") + "}");
        }
        // After matching, remove the placeholder field.
        implementor.add(null,
                "{\"$match\": {" + BOOLEAN_FLAG_FIELD + ": {\"$eq\": true}}}");
        implementor.add(null, "{\"$project\": {" + BOOLEAN_FLAG_FIELD + ":0}}");
    }
}
4,593
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbAggregate.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package software.amazon.documentdb.jdbc.calcite.adapter;

import com.google.common.collect.ImmutableList;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.InvalidRelException;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Aggregate;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.sql.SqlAggFunction;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.Util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.metadata.DocumentDbMetadataColumn;
import software.amazon.documentdb.jdbc.metadata.DocumentDbMetadataTable;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaColumn;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import static software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbRules.getNormalizedIdentifier;
import static software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbRules.maybeQuote;

/**
 * Implementation of
 * {@link Aggregate} relational expression
 * in MongoDB.
 *
 * <p>Translates GROUP BY / aggregate calls into a {@code $group} pipeline
 * stage, optionally followed by a {@code $project} "fixup" stage that
 * renames {@code _id} keys and finalizes DISTINCT/SUM accumulators.
 */
public class DocumentDbAggregate
        extends Aggregate implements DocumentDbRel {
    private static final Logger LOGGER =
            LoggerFactory.getLogger(DocumentDbAggregate.class.getName());

    /**
     * Creates a new {@link DocumentDbAggregate}
     * @param cluster the {@link RelOptCluster} cluster.
     * @param traitSet the trait set.
     * @param input the node input.
     * @param groupSet the group set.
     * @param groupSets the group sets.
     * @param aggCalls the aggregate calls.
     * @throws InvalidRelException if aggregate call includes the unsupported DISTINCT
     */
    public DocumentDbAggregate(
            final RelOptCluster cluster,
            final RelTraitSet traitSet,
            final RelNode input,
            final ImmutableBitSet groupSet,
            final List<ImmutableBitSet> groupSets,
            final List<AggregateCall> aggCalls)
            throws InvalidRelException {
        super(cluster, traitSet, ImmutableList.of(), input, groupSet, groupSets, aggCalls);
        assert getConvention() == CONVENTION;
        assert getConvention() == input.getConvention();
        // Only plain GROUP BY is supported; ROLLUP/CUBE/GROUPING SETS are rejected.
        switch (getGroupType()) {
            case SIMPLE:
                break;
            default:
                throw new InvalidRelException("unsupported group type: " + getGroupType());
        }
    }

    /**
     * DEPRECATED
     * @param cluster the cluster.
     * @param traitSet the trait set.
     * @param input the input.
     * @param indicator the indicator.
     * @param groupSet the group set.
     * @param groupSets the group sets.
     * @param aggCalls the aggregate calls.
     * @throws InvalidRelException if aggregate call includes the unsupported DISTINCT
     */
    @Deprecated // to be removed before 2.0
    public DocumentDbAggregate(final RelOptCluster cluster,
            final RelTraitSet traitSet,
            final RelNode input,
            final boolean indicator,
            final ImmutableBitSet groupSet,
            final List<ImmutableBitSet> groupSets,
            final List<AggregateCall> aggCalls)
            throws InvalidRelException {
        this(cluster, traitSet, input, groupSet, groupSets, aggCalls);
        checkIndicator(indicator);
    }

    @Override
    public Aggregate copy(final RelTraitSet traitSet, final RelNode input,
            final ImmutableBitSet groupSet,
            final List<ImmutableBitSet> groupSets,
            final List<AggregateCall> aggCalls) {
        try {
            return new DocumentDbAggregate(getCluster(), traitSet, input,
                    groupSet, groupSets, aggCalls);
        } catch (InvalidRelException e) {
            // Semantic error not possible. Must be a bug. Convert to
            // internal error.
            throw new AssertionError(e);
        }
    }

    /**
     * Emits the {@code $group} (and, when needed, fixup {@code $project})
     * stages and replaces the implementor's metadata table so downstream
     * rel nodes address the grouped/aggregated output columns.
     *
     * @param implementor the {@link DocumentDbRel} implementor accumulating pipeline stages.
     */
    @Override
    public void implement(final Implementor implementor) {
        implementor.visitChild(0, getInput());
        final List<String> list = new ArrayList<>();
        // A second implementor is walked over the same input to obtain the
        // input's (pre-aggregation) metadata table for name translation.
        final Implementor mongoImplementor = new Implementor(implementor.getRexBuilder());
        mongoImplementor.visitChild(0, getInput());
        // DocumentDB: modified - start
        final List<String> mongoFieldNames =
                DocumentDbRules.mongoFieldNames(getInput().getRowType(),
                        mongoImplementor.getMetadataTable());
        final List<String> inNames = getInput().getRowType().getFieldNames();
        final List<String> outNames = getRowType().getFieldNames();
        final LinkedHashMap<String, DocumentDbSchemaColumn> columnMap =
                new LinkedHashMap<>(implementor.getMetadataTable().getColumnMap());
        int columnIndex = 0;
        if (groupSet.cardinality() == 1) {
            // Single grouping column: its value becomes the $group _id directly.
            final String outName = outNames.get(columnIndex);
            final String inName = inNames.get(groupSet.nth(0));
            final String fieldName = mongoFieldNames.get(groupSet.nth(0));
            final DocumentDbSchemaColumn oldColumn =
                    implementor.getMetadataTable().getColumnMap().get(inName);
            list.add("_id: " + maybeQuote("$" + fieldName));
            assert oldColumn != null;
            columnMap.put(outName, getUpdatedColumn(oldColumn, outName));
            ++columnIndex;
        } else {
            // Multiple grouping columns: _id becomes a composite document.
            final List<String> keys = new ArrayList<>();
            for (int group : groupSet) {
                final String outName = outNames.get(columnIndex);
                final String inName = inNames.get(group);
                final String fieldName = mongoFieldNames.get(group);
                final DocumentDbSchemaColumn oldColumn =
                        implementor.getMetadataTable().getColumnMap().get(inName);
                keys.add(maybeQuote(acceptedMongoFieldName(outName)) + ": "
                        + DocumentDbRules.quote("$" + fieldName));
                assert oldColumn != null;
                columnMap.put(outName, getUpdatedColumn(oldColumn, outName));
                ++columnIndex;
            }
            list.add("_id: " + Util.toString(keys, "{", ", ", "}"));
        }
        // Aggregate outputs follow the grouping columns positionally.
        for (AggregateCall aggCall : aggCalls) {
            final String outName = outNames.get(columnIndex++);
            list.add(
                    maybeQuote(acceptedMongoFieldName(outName)) + ": "
                            + toMongo(aggCall.getAggregation(), mongoFieldNames,
                            aggCall.getArgList(), aggCall.isDistinct()));
            columnMap.put(outName,
                    DocumentDbMetadataColumn.builder()
                            .isGenerated(true)
                            .fieldPath(acceptedMongoFieldName(outName))
                            .sqlName(outName)
                            .build());
        }
        implementor.add(null,
                "{$group: " + Util.toString(list, "{", ", ", "}") + "}");
        // A fixup $project is only needed to rename _id keys (any GROUP BY) or
        // finalize DISTINCT/SUM accumulators built as arrays in $group.
        final List<String> fixups = getFixups(aggCalls, groupSet, outNames);
        if (!groupSet.isEmpty()
                || aggCalls.stream().anyMatch(aggCall -> aggCall.isDistinct()
                || aggCall.getAggregation() == SqlStdOperatorTable.SUM)) {
            implementor.add(null,
                    "{$project: " + Util.toString(fixups, "{", ", ", "}") + "}");
        }
        // Set the metadata table with the updated column map.
        final DocumentDbSchemaTable oldMetadata = implementor.getMetadataTable();
        final DocumentDbSchemaTable metadata = DocumentDbMetadataTable.builder()
                .sqlName(oldMetadata.getSqlName())
                .collectionName(oldMetadata.getCollectionName())
                .columns(columnMap)
                .build();
        implementor.setMetadataTable(metadata);
        implementor.setDocumentDbTable(
                new DocumentDbTable(implementor.getDocumentDbTable().getCollectionName(), metadata));
        LOGGER.info("Created aggregation stages of pipeline.");
        LOGGER.debug("Pipeline stages added: {}",
                implementor.getList().stream()
                        .map(c -> c.right)
                        .toArray());
        // DocumentDB: modified - end
    }

    /**
     * Rebuilds a grouping column's metadata with {@code resolvedPath} pointing
     * at the aggregate output name, so later stages address the renamed field.
     *
     * @param oldColumn the pre-aggregation column metadata.
     * @param outName the output (SQL) column name.
     * @return the rebuilt column metadata.
     */
    private static DocumentDbSchemaColumn getUpdatedColumn(final DocumentDbSchemaColumn oldColumn,
            final String outName) {
        return DocumentDbMetadataColumn.builder()
                .fieldPath(oldColumn.getFieldPath())
                .sqlName(oldColumn.getSqlName())
                .sqlType(oldColumn.getSqlType())
                .dbType(oldColumn.getDbType())
                .isIndex(oldColumn.isIndex())
                .isPrimaryKey(oldColumn.isPrimaryKey())
                .foreignKeyTableName(oldColumn.getForeignKeyTableName())
                .foreignKeyColumnName(oldColumn.getForeignKeyColumnName())
                .resolvedPath(acceptedMongoFieldName(outName))
                .build();
    }

    /**
     * Renders a single aggregate call as a {@code $group} accumulator expression.
     *
     * @param aggregation the SQL aggregate function.
     * @param inNames the translated MongoDB field names of the input row.
     * @param args the argument ordinals of the aggregate call.
     * @param isDistinct whether the call is DISTINCT.
     * @return the accumulator expression as a JSON fragment.
     */
    private static String toMongo(final SqlAggFunction aggregation,
            final List<String> inNames,
            final List<Integer> args,
            final boolean isDistinct) {
        // Apart from COUNT(*) which has 0 arguments, supported aggregations should be a called with only 1 argument.
        if (!(args.isEmpty() && aggregation == SqlStdOperatorTable.COUNT) && args.size() != 1) {
            throw new AssertionError("aggregate with incorrect number of arguments: " + aggregation);
        }
        // For distinct calls, add to a set and get aggregate after.
        if (isDistinct) {
            assert args.size() == 1;
            final String inName = inNames.get(args.get(0));
            return "{$addToSet: " + maybeQuote("$" + inName) + "}";
        }
        if (aggregation == SqlStdOperatorTable.COUNT) {
            if (args.isEmpty()) {
                return "{$sum: 1}";
            } else {
                // COUNT(col) must skip nulls: count 1 only when the value compares above null.
                final String inName = inNames.get(args.get(0));
                return "{$sum: {$cond: [ {$gt: "
                        + "[" + maybeQuote("$" + inName) + ", null]}, 1, 0]}}";
            }
        } else if (aggregation == SqlStdOperatorTable.SUM) {
            // SUM collects values into an array; getFixups later sums it (or
            // yields null when every value is null).
            final String inName = inNames.get(args.get(0));
            return "{$push: " + maybeQuote("$" + inName) + "}";
        } else if (aggregation == SqlStdOperatorTable.SUM0) {
            final String inName = inNames.get(args.get(0));
            return "{$sum: " + maybeQuote("$" + inName) + "}";
        } else if (aggregation == SqlStdOperatorTable.MIN) {
            final String inName = inNames.get(args.get(0));
            return "{$min: " + maybeQuote("$" + inName) + "}";
        } else if (aggregation == SqlStdOperatorTable.MAX) {
            final String inName = inNames.get(args.get(0));
            return "{$max: " + maybeQuote("$" + inName) + "}";
        } else if (aggregation == SqlStdOperatorTable.AVG) {
            final String inName = inNames.get(args.get(0));
            return "{$avg: " + maybeQuote("$" + inName) + "}";
        } else {
            throw new AssertionError("unknown aggregate " + aggregation);
        }
    }

    // Normalizes an identifier and replaces '.' (a path separator in MongoDB)
    // so the result is usable as a single document key.
    private static String acceptedMongoFieldName(final String path) {
        return getNormalizedIdentifier(path).replace('.', '_');
    }

    /**
     * Renders the expression that reduces a DISTINCT accumulator set (built by
     * {@code $addToSet}) to the final aggregate value in the fixup stage.
     *
     * @param aggFunction the aggregate function applied to the set.
     * @param outName the output field holding the set.
     * @return the reducing expression as a JSON fragment.
     */
    private static String setToAggregate(final SqlAggFunction aggFunction, final String outName) {
        if (aggFunction == SqlStdOperatorTable.COUNT) {
            // Return size of set with null values removed.
            return "{$size: {$filter: {"
                    + "input:" + maybeQuote("$" + outName) + ", "
                    + "cond: { $gt: [ '$$this', null]}}}}";
        } else if (aggFunction == SqlStdOperatorTable.AVG) {
            return "{$avg: " + maybeQuote("$" + outName) + " }";
        } else if (aggFunction == SqlStdOperatorTable.SUM) {
            return arrayToSum(outName);
        } else if (aggFunction == SqlStdOperatorTable.SUM0) {
            return "{$sum: " + maybeQuote("$" + outName) + " }";
        } else {
            throw new AssertionError("unknown distinct aggregate" + aggFunction);
        }
    }

    // Renders the SQL SUM semantics over an array accumulator.
    private static String arrayToSum(final String outName) {
        // If there are any non-null values, return the sum. Otherwise, return null.
        return String.format(
                "{$cond: [ {$gt: [ {$size: {$filter: {"
                        + "input: %1$s, "
                        + "cond: { $gt: [ '$$this', null]}}}}, 0]}, "
                        + "{$sum: %1$s }, null]}",
                maybeQuote("$" + outName));
    }

    /**
     * Determines the $project stage after the $group stage. "Fixups" are needed
     * when columns from the group set are selected or there are distinct aggregate calls.
     * Logic was pulled out of original implementation of implement method.
     * @param aggCalls the aggregate calls.
     * @param groupSet the group set.
     * @param outNames the names of the output row type.
     * @return list of fields that should be projected.
     */
    private static List<String> getFixups(
            final List<AggregateCall> aggCalls,
            final ImmutableBitSet groupSet,
            final List<String> outNames) {
        // DocumentDB: modified - start
        final List<String> fixups = new ArrayList<>();
        int columnIndex = 0;
        if (groupSet.cardinality() == 1) {
            // Single grouping key: rename the $group _id back to the output column.
            fixups.add(maybeQuote(outNames.get(columnIndex++)) + ": " + maybeQuote("$" + "_id"));
        } else {
            fixups.add("_id: 0");
            // NOTE(review): the loop variable 'group' is unused; outNames are
            // consumed positionally via columnIndex — confirm this is intended.
            for (int group : groupSet) {
                final String outName = acceptedMongoFieldName(outNames.get(columnIndex++));
                fixups.add(
                        maybeQuote(outName) + ": "
                                + maybeQuote("$_id." + acceptedMongoFieldName(outName)));
            }
        }
        for (AggregateCall aggCall : aggCalls) {
            final String outName = acceptedMongoFieldName(outNames.get(columnIndex++));
            // Get the aggregate for any sets made in $group stage.
            if (aggCall.isDistinct()) {
                fixups.add(maybeQuote(outName) + ": "
                        + setToAggregate(aggCall.getAggregation(), outName));
            } else if (aggCall.getAggregation() == SqlStdOperatorTable.SUM) {
                // If there are any non-nulls, return the sum. Otherwise, return null.
                fixups.add(maybeQuote(outName) + ": " + arrayToSum(outName));
            } else {
                fixups.add(
                        maybeQuote(outName) + ": " + maybeQuote("$" + outName));
            }
        }
        return fixups;
        // DocumentDB: modified - end
    }
}
4,594
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbMethod.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package software.amazon.documentdb.jdbc.calcite.adapter;

import com.google.common.collect.ImmutableMap;
import org.apache.calcite.linq4j.tree.Types;
import java.lang.reflect.Method;
import java.util.List;

/**
 * Builtin methods in the MongoDB adapter.
 *
 * <p>Each constant caches the reflective {@link Method} it names so the
 * enumerable code generator can reference it without repeated lookups.
 */
public enum DocumentDbMethod {
    // TODO: Investigate using find() here for simpler queries.
    // See: https://github.com/aws/amazon-documentdb-jdbc-driver/issues/240
    MONGO_QUERYABLE_AGGREGATE(DocumentDbTable.DocumentDbQueryable.class, "aggregate",
            List.class, List.class, List.class);

    // Method is mutable by reflection standards, hence the suppression.
    @SuppressWarnings("ImmutableEnumChecker")
    private final Method method;

    public Method getMethod() {
        return method;
    }

    /** Reverse lookup from the reflective {@link Method} to its enum constant. */
    public static final ImmutableMap<Method, DocumentDbMethod> MAP;

    static {
        final ImmutableMap.Builder<Method, DocumentDbMethod> builder = ImmutableMap.builder();
        for (DocumentDbMethod value : DocumentDbMethod.values()) {
            builder.put(value.method, value);
        }
        MAP = builder.build();
    }

    /**
     * Looks up and caches the named method via reflection.
     * Uses bounded wildcards instead of raw {@code Class} types.
     *
     * @param clazz the declaring class.
     * @param methodName the method name.
     * @param argumentTypes the parameter types of the method.
     */
    DocumentDbMethod(final Class<?> clazz, final String methodName,
            final Class<?>... argumentTypes) {
        this.method = Types.lookupMethod(clazz, methodName, argumentTypes);
    }
}
4,595
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbSchema.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package software.amazon.documentdb.jdbc.calcite.adapter;

import lombok.SneakyThrows;
import org.apache.calcite.schema.Table;
import org.apache.calcite.schema.impl.AbstractSchema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.common.utilities.LazyLinkedHashMap;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable;
import java.util.LinkedHashSet;
import java.util.Map;

/**
 * Calcite schema backed by DocumentDB database metadata. Table instances are
 * materialized lazily, one per table name, on first access.
 */
public class DocumentDbSchema extends AbstractSchema {
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbSchema.class);
    // Lazily initialized on the first getTableMap() call.
    private Map<String, Table> tables;
    private final DocumentDbDatabaseSchemaMetadata databaseMetadata;
    private final String databaseName;

    /**
     * Constructs a new {@link DocumentDbSchema} from {@link DocumentDbDatabaseSchemaMetadata}.
     *
     * @param databaseMetadata the database metadata.
     */
    protected DocumentDbSchema(final DocumentDbDatabaseSchemaMetadata databaseMetadata,
            final DocumentDbConnectionProperties connectionProperties) {
        this.databaseMetadata = databaseMetadata;
        this.databaseName = connectionProperties.getDatabase();
        this.tables = null;
    }

    /**
     * Returns the lazily built name-to-table map; entries resolve to
     * {@link DocumentDbTable} instances on demand.
     */
    @SneakyThrows
    @Override
    protected Map<String, Table> getTableMap() {
        if (tables != null) {
            return tables;
        }
        final LinkedHashSet<String> tableNames =
                new LinkedHashSet<>(databaseMetadata.getTableSchemaMap().keySet());
        tables = new LazyLinkedHashMap<>(tableNames, this::getDocumentDbTable);
        return tables;
    }

    /**
     * Resolves a table name to a {@link DocumentDbTable} using the cached schema map.
     *
     * @param tableName the SQL table name to resolve.
     * @return the resolved table.
     */
    @SneakyThrows
    private Table getDocumentDbTable(final String tableName) {
        final DocumentDbSchemaTable tableSchema =
                databaseMetadata.getTableSchemaMap().get(tableName);
        if (tableSchema == null) {
            // This will occur if the table schema is deleted after retrieving the
            // database schema.
            throw SqlError.createSQLException(
                    LOGGER,
                    SqlState.DATA_EXCEPTION,
                    SqlError.INCONSISTENT_SCHEMA,
                    tableName);
        }
        return new DocumentDbTable(tableSchema.getCollectionName(), tableSchema);
    }

    /**
     * Gets the name of the database.
     *
     * @return the name of the database.
     */
    public String getDatabaseName() {
        return databaseName;
    }
}
4,596
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/calcite/adapter/DocumentDbEnumerable.java
/*
 * Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.calcite.adapter;

import org.apache.calcite.linq4j.AbstractEnumerable;
import org.apache.calcite.linq4j.Enumerator;
import org.bson.conversions.Bson;

import java.util.List;

/**
 * Initially, aggregate and find returned anonymous classes as the enumerable in CalciteSignature.
 * Returning this instead, allows us to get more information from CalciteSignature.
 */
public class DocumentDbEnumerable extends AbstractEnumerable<Object> {
    private final String databaseName;
    private final String collectionName;
    private final List<Bson> list;
    private final List<String> paths;

    /**
     * Constructs a new enumerable carrying the query's target and pipeline.
     *
     * @param databaseName the target database name.
     * @param collectionName the target collection name.
     * @param list the aggregation pipeline stages.
     * @param paths the projected field paths.
     */
    public DocumentDbEnumerable(
            final String databaseName,
            final String collectionName,
            final List<Bson> list,
            final List<String> paths) {
        this.databaseName = databaseName;
        this.collectionName = collectionName;
        this.list = list;
        this.paths = paths;
    }

    public String getDatabaseName() {
        return databaseName;
    }

    public String getCollectionName() {
        return collectionName;
    }

    public List<Bson> getList() {
        return list;
    }

    public List<String> getPaths() {
        return paths;
    }

    @Override
    public Enumerator<Object> enumerator() {
        // Implement the enumerable interface but do not execute query.
        return new DocumentDbEnumerator();
    }
}
4,597
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/sshtunnel/DocumentDbSshTunnelClient.java
/*
 * Copyright <2022> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.sshtunnel;

import com.google.common.annotations.VisibleForTesting;
import org.checkerframework.checker.nullness.qual.NonNull;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;

import java.sql.SQLException;
import java.util.concurrent.atomic.AtomicBoolean;

import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.isNullOrWhitespace;

/**
 * The DocumentDbSshTunnelClient class provides a way for connections to ensure
 * a single instance SSH tunnel is started and stays running while this object is alive.
 */
public class DocumentDbSshTunnelClient implements AutoCloseable {
    private final DocumentDbSshTunnelServer sshTunnelServer;
    private final AtomicBoolean closed;
    // Guards close() so the server's client count is decremented at most once.
    private final Object lock = new Object();

    /**
     * Creates a new SSH Tunnel client object from the given connection properties.
     *
     * @param properties The connection properties for this SSH Tunnel.
     * @throws SQLException When an error occurs attempting to ensure an SSH Tunnel instance is running.
     */
    public DocumentDbSshTunnelClient(final @NonNull DocumentDbConnectionProperties properties)
            throws SQLException {
        validateSshTunnelProperties(properties);
        sshTunnelServer = DocumentDbSshTunnelServer.builder(
                        properties.getSshUser(),
                        properties.getSshHostname(),
                        properties.getSshPrivateKeyFile(),
                        properties.getHostname())
                .sshPrivateKeyPassphrase(properties.getSshPrivateKeyPassphrase())
                .sshStrictHostKeyChecking(properties.getSshStrictHostKeyChecking())
                .sshKnownHostsFile(properties.getSshKnownHostsFile())
                .build();
        // Registering as a client starts the shared tunnel if it isn't running yet.
        sshTunnelServer.addClient();
        closed = new AtomicBoolean(false);
    }

    /**
     * Validates that all properties required to establish an SSH tunnel are present
     * and that the referenced local files exist.
     *
     * @param properties the connection properties to validate.
     * @throws IllegalArgumentException if any required property is missing or blank.
     * @throws SQLException if the private key or known-hosts file cannot be found.
     */
    private static void validateSshTunnelProperties(final DocumentDbConnectionProperties properties)
            throws SQLException {
        if (isNullOrWhitespace(properties.getSshUser())
                || isNullOrWhitespace(properties.getSshHostname())
                || isNullOrWhitespace(properties.getSshPrivateKeyFile())
                || isNullOrWhitespace(properties.getHostname())) {
            // Include the requirement in the message so callers can diagnose
            // which configuration is incomplete.
            throw new IllegalArgumentException(
                    "The SSH tunnel user, SSH tunnel hostname, SSH private key file"
                            + " and cluster hostname properties are all required"
                            + " to start an SSH tunnel.");
        }
        DocumentDbSshTunnelServer.validateSshPrivateKeyFile(properties);
        DocumentDbSshTunnelServer.getSshKnownHostsFilename(properties);
    }

    /**
     * Gets the SSH tunnel listening port number. If the port number is zero, the SSH Tunnel is not running.
     *
     * @return The SSH tunnel listening port number, or zero.
     */
    public int getServiceListeningPort() {
        return sshTunnelServer.getServiceListeningPort();
    }

    /**
     * Gets indicator of whether the SSH Tunnel server is alive.
     *
     * @return Returns true if the server is alive, false otherwise.
     */
    public boolean isServerAlive() {
        return getSshTunnelServer().isAlive();
    }

    /**
     * Closes the client object by unlocking and deleting the client lock file. If this is the last client
     * for the server, the SSH Tunnel server will be shutdown.
     *
     * @throws SQLException When an error occurs closing the session.
     */
    @Override
    public void close() throws SQLException {
        synchronized (lock) {
            if (closed.get()) {
                return;
            }
            sshTunnelServer.removeClient();
            closed.set(true);
        }
    }

    /**
     * Gets the SSH Tunnel server object.
     *
     * @return An {@link DocumentDbSshTunnelServer} object.
     */
    @VisibleForTesting
    @NonNull DocumentDbSshTunnelServer getSshTunnelServer() {
        return sshTunnelServer;
    }
}
4,598
0
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc
Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/sshtunnel/DocumentDbSshTunnelServer.java
/*
 * Copyright <2022> Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */

package software.amazon.documentdb.jdbc.sshtunnel;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.hash.Hashing;
import com.jcraft.jsch.HostKey;
import com.jcraft.jsch.HostKeyRepository;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import lombok.AllArgsConstructor;
import lombok.Getter;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;

import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.getPath;
import static software.amazon.documentdb.jdbc.DocumentDbConnectionProperties.isNullOrWhitespace;

/**
 * Provides a single-instance SSH Tunnel server.
 * <p>
 * Use the {@link #builder(String, String, String, String)} method to instantiate
 * a new {@link DocumentDbSshTunnelServerBuilder} object. Set the properties as needed,
 * then call the build() method.
 */
public final class DocumentDbSshTunnelServer implements AutoCloseable {
    public static final String SSH_KNOWN_HOSTS_FILE = "~/.ssh/known_hosts";
    public static final String STRICT_HOST_KEY_CHECKING = "StrictHostKeyChecking";
    public static final String HASH_KNOWN_HOSTS = "HashKnownHosts";
    public static final String SERVER_HOST_KEY = "server_host_key";
    public static final String YES = "yes";
    public static final String NO = "no";
    public static final String LOCALHOST = "localhost";
    public static final int DEFAULT_DOCUMENTDB_PORT = 27017;
    public static final int DEFAULT_SSH_PORT = 22;
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbSshTunnelServer.class);
    public static final int DEFAULT_CLOSE_DELAY_MS = 30000;

    // Guards session, scheduledFuture and client-count transitions.
    private final Object mutex = new Object();
    private final AtomicLong clientCount = new AtomicLong(0);
    private final String sshUser;
    private final String sshHostname;
    private final String sshPrivateKeyFile;
    private final String sshPrivateKeyPassphrase;
    private final boolean sshStrictHostKeyChecking;
    private final String sshKnownHostsFile;
    private final String remoteHostname;
    private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
    private DocumentDbSshTunnelServer.SshPortForwardingSession session = null;
    private ScheduledFuture<?> scheduledFuture = null;
    private long closeDelayMS = DEFAULT_CLOSE_DELAY_MS;

    private DocumentDbSshTunnelServer(final DocumentDbSshTunnelServerBuilder builder) {
        this.sshUser = builder.sshUser;
        this.sshHostname = builder.sshHostname;
        this.sshPrivateKeyFile = builder.sshPrivateKeyFile;
        this.remoteHostname = builder.sshRemoteHostname;
        this.sshPrivateKeyPassphrase = builder.sshPrivateKeyPassphrase;
        this.sshStrictHostKeyChecking = builder.sshStrictHostKeyChecking;
        this.sshKnownHostsFile = builder.sshKnownHostsFile;
        // Do NOT log the passphrase itself - it is a secret. Only log whether one was supplied.
        LOGGER.debug("sshUser='{}' sshHostname='{}' sshPrivateKeyFile='{}' remoteHostname='{}'"
                        + " sshPrivateKeyPassphrase present='{}' sshStrictHostKeyChecking='{}'"
                        + " sshKnownHostsFile='{}'",
                this.sshUser,
                this.sshHostname,
                this.sshPrivateKeyFile,
                this.remoteHostname,
                this.sshPrivateKeyPassphrase != null,
                this.sshStrictHostKeyChecking,
                this.sshKnownHostsFile
        );
    }

    /**
     * Gets the hash string for the SSH properties provided.
     *
     * @param sshUser the username credential for the SSH tunnel.
     * @param sshHostname the hostname (or IP address) for the SSH tunnel.
     * @param sshPrivateKeyFile the path to the private key file.
     * @param remoteHostname the hostname of the remote server.
     *
     * @return a String value representing the hash of the given properties.
     */
    static String getHashString(
            final String sshUser,
            final String sshHostname,
            final String sshPrivateKeyFile,
            final String remoteHostname) {
        final String sshPropertiesString = sshUser + "-" + sshHostname + "-" + sshPrivateKeyFile
                + remoteHostname;
        return Hashing.sha256()
                .hashString(sshPropertiesString, StandardCharsets.UTF_8)
                .toString();
    }

    /**
     * Initializes the SSH session and creates a port forwarding tunnel.
     *
     * @param connectionProperties the {@link DocumentDbConnectionProperties} connection properties.
     * @return a {@link Session} session. This session must be closed by calling the
     * {@link Session#disconnect()} method.
     * @throws SQLException if unable to create SSH session or create the port forwarding tunnel.
     */
    public static SshPortForwardingSession createSshTunnel(
            final DocumentDbConnectionProperties connectionProperties) throws SQLException {
        validateSshPrivateKeyFile(connectionProperties);

        LOGGER.debug("Internal SSH tunnel starting.");
        try {
            final JSch jSch = new JSch();
            addIdentity(connectionProperties, jSch);
            final Session session = createSession(connectionProperties, jSch);
            connectSession(connectionProperties, jSch, session);
            final SshPortForwardingSession portForwardingSession = getPortForwardingSession(
                    connectionProperties, session);
            LOGGER.debug("Internal SSH tunnel started on local port '{}'.",
                    portForwardingSession.getLocalPort());
            LOGGER.debug("Internal SSH tunnel started.");
            return portForwardingSession;
        } catch (Exception e) {
            throw logException(e);
        }
    }

    private static SshPortForwardingSession getPortForwardingSession(
            final DocumentDbConnectionProperties connectionProperties,
            final Session session) throws JSchException {
        final Pair<String, Integer> clusterHostAndPort = getHostAndPort(
                connectionProperties.getHostname(), DEFAULT_DOCUMENTDB_PORT);
        // Local port 0 asks JSch to pick an available ephemeral port.
        final int localPort = session.setPortForwardingL(
                LOCALHOST, 0, clusterHostAndPort.getLeft(), clusterHostAndPort.getRight());
        return new SshPortForwardingSession(session, localPort);
    }

    // Splits "host[:port]" into a host/port pair, defaulting the port when absent.
    private static Pair<String, Integer> getHostAndPort(
            final String hostname, final int defaultPort) {
        final String clusterHost;
        final int clusterPort;
        final int portSeparatorIndex = hostname.indexOf(':');
        if (portSeparatorIndex >= 0) {
            clusterHost = hostname.substring(0, portSeparatorIndex);
            clusterPort = Integer.parseInt(
                    hostname.substring(portSeparatorIndex + 1));
        } else {
            clusterHost = hostname;
            clusterPort = defaultPort;
        }
        return new ImmutablePair<>(clusterHost, clusterPort);
    }

    private static void connectSession(
            final DocumentDbConnectionProperties connectionProperties,
            final JSch jSch,
            final Session session) throws SQLException {
        setSecurityConfig(connectionProperties, jSch, session);
        try {
            session.connect();
        } catch (JSchException e) {
            throw logException(e);
        }
    }

    private static void addIdentity(
            final DocumentDbConnectionProperties connectionProperties,
            final JSch jSch) throws JSchException {
        final String privateKeyFileName = getPath(connectionProperties.getSshPrivateKeyFile(),
                DocumentDbConnectionProperties.getDocumentDbSearchPaths()).toString();
        LOGGER.debug("SSH private key file resolved to '{}'.", privateKeyFileName);
        // If passPhrase protected, will need to provide this, too.
        final String passPhrase = !isNullOrWhitespace(connectionProperties.getSshPrivateKeyPassphrase())
                ? connectionProperties.getSshPrivateKeyPassphrase()
                : null;
        jSch.addIdentity(privateKeyFileName, passPhrase);
    }

    private static Session createSession(
            final DocumentDbConnectionProperties connectionProperties,
            final JSch jSch) throws SQLException {
        final String sshUsername = connectionProperties.getSshUser();
        final Pair<String, Integer> sshHostAndPort = getHostAndPort(
                connectionProperties.getSshHostname(), DEFAULT_SSH_PORT);
        setKnownHostsFile(connectionProperties, jSch);
        try {
            return jSch.getSession(sshUsername, sshHostAndPort.getLeft(), sshHostAndPort.getRight());
        } catch (JSchException e) {
            throw logException(e);
        }
    }

    private static void setSecurityConfig(
            final DocumentDbConnectionProperties connectionProperties,
            final JSch jSch,
            final Session session) {
        if (!connectionProperties.getSshStrictHostKeyChecking()) {
            session.setConfig(STRICT_HOST_KEY_CHECKING, NO);
            return;
        }
        setHostKeyType(connectionProperties, jSch, session);
    }

    private static void setHostKeyType(
            final DocumentDbConnectionProperties connectionProperties,
            final JSch jSch, final Session session) {
        final HostKeyRepository keyRepository = jSch.getHostKeyRepository();
        final HostKey[] hostKeys = keyRepository.getHostKey();
        final Pair<String, Integer> sshHostAndPort = getHostAndPort(
                connectionProperties.getSshHostname(), DEFAULT_SSH_PORT);
        final HostKey hostKey = Arrays.stream(hostKeys)
                .filter(hk -> hk.getHost().equals(sshHostAndPort.getLeft()))
                .findFirst().orElse(null);
        // This will ensure a match between how the host key was hashed in the known_hosts file.
        final String hostKeyType = (hostKey != null) ? hostKey.getType() : null;
        // Append the hash algorithm
        if (hostKeyType != null) {
            session.setConfig(SERVER_HOST_KEY, session.getConfig(SERVER_HOST_KEY) + "," + hostKeyType);
        }
        // The default behaviour of `ssh-keygen` is to hash known hosts keys
        session.setConfig(HASH_KNOWN_HOSTS, YES);
    }

    private static void setKnownHostsFile(
            final DocumentDbConnectionProperties connectionProperties,
            final JSch jSch) throws SQLException {
        if (!connectionProperties.getSshStrictHostKeyChecking()) {
            return;
        }
        final String knownHostsFilename;
        knownHostsFilename = getSshKnownHostsFilename(connectionProperties);
        try {
            jSch.setKnownHosts(knownHostsFilename);
        } catch (JSchException e) {
            throw logException(e);
        }
    }

    // Logs the exception and normalizes it to SQLException, preserving the cause.
    private static <T extends Exception> SQLException logException(final T e) {
        LOGGER.error(e.getMessage(), e);
        if (e instanceof SQLException) {
            return (SQLException) e;
        }
        return new SQLException(e.getMessage(), e);
    }

    /**
     * Gets the SSH tunnel service listening port. A value of zero indicates that the
     * SSH tunnel service is not running.
     *
     * @return A port number that the SSH tunnel service is listening on.
     */
    public int getServiceListeningPort() {
        return session != null ? session.getLocalPort() : 0;
    }

    @Override
    public void close() {
        synchronized (mutex) {
            if (session != null) {
                LOGGER.debug("Internal SSH Tunnel is stopping.");
                session.getSession().disconnect();
                session = null;
                LOGGER.debug("Internal SSH Tunnel is stopped.");
            }
        }
    }

    /**
     * Adds a client to the reference count for this server. If this is the first client, the server
     * ensures that an SSH Tunnel service is started.
     *
     * @throws SQLException When an error occurs trying to start the SSH Tunnel service.
     */
    public void addClient() throws SQLException {
        // Needs to be synchronized in a single process
        synchronized (mutex) {
            // A new client aborts any pending delayed close.
            cancelScheduledFutureClose();
            clientCount.incrementAndGet();
            if (session != null && session.getLocalPort() != 0) {
                return;
            }
            validateLocalSshFilesExists();
            session = createSshTunnel(getConnectionProperties());
        }
    }

    /**
     * Removes a client from the reference count for this server. If the reference count reaches zero, then
     * the serve attempt to stop the SSH Tunnel service.
     *
     * @throws SQLException When an error occur attempting shutdown of the service process.
     */
    public void removeClient() throws SQLException {
        synchronized (mutex) {
            // Takes advantage of OR to only decrement if greater than zero.
            if (clientCount.get() <= 0 || clientCount.decrementAndGet() > 0) {
                return;
            }
            closeSession();
        }
    }

    /**
     * Closes the SSH tunnel session. If a close delay is given, delay the
     * close until that time has passed.
     *
     * @throws SQLException In the case the task is interrupted.
     */
    private void closeSession() throws SQLException {
        cancelScheduledFutureClose();
        // Delay the close, if indicated.
        final long delayMS = getCloseDelayMS();
        if (delayMS <= 0) {
            close();
        } else {
            LOGGER.debug("Close timer is being scheduled.");
            scheduledFuture = scheduler.schedule(getCloseTimerTask(), delayMS, TimeUnit.MILLISECONDS);
        }
    }

    /**
     * Gets the {@link Runnable} task to close the SSH tunnel session.
     *
     * @return the task to close the SSH tunnel session.
     */
    private Runnable getCloseTimerTask() {
        return () -> {
            try {
                close();
            } catch (Exception e) {
                // Ignore exception on close.
                LOGGER.warn(e.getMessage(), e);
            }
        };
    }

    /**
     * Cancels the scheduled future to close the SSH tunnel session in the case a new client gets added before
     * the close occurs.
     *
     * @throws SQLException If interrupted during sleep.
     */
    private void cancelScheduledFutureClose() throws SQLException {
        synchronized (mutex) {
            if (scheduledFuture != null) {
                LOGGER.debug("Close timer is being cancelled.");
                // Poll until the cancelled task reports done so a racing close
                // cannot run after this method returns.
                while (!scheduledFuture.isDone()) {
                    scheduledFuture.cancel(false);
                    try {
                        TimeUnit.MILLISECONDS.sleep(10);
                    } catch (InterruptedException e) {
                        throw new SQLException(e.getMessage(), e);
                    }
                }
            }
            scheduledFuture = null;
        }
    }

    @VisibleForTesting
    long getCloseDelayMS() {
        return closeDelayMS;
    }

    @VisibleForTesting
    void setCloseDelayMS(final long closeDelayMS) {
        this.closeDelayMS = closeDelayMS > 0 ? closeDelayMS : 0;
    }

    /**
     * Gets the number of clients using the server.
     *
     * @return The number of clients using the server.
     */
    @VisibleForTesting
    long getClientCount() {
        synchronized (mutex) {
            return clientCount.get();
        }
    }

    /**
     * Checks the state of the SSH tunnel service.
     *
     * @return Returns true if the SSH tunnel service is running.
     */
    public boolean isAlive() {
        return session != null;
    }

    /**
     * Factory method for the {@link DocumentDbSshTunnelServerBuilder} class.
     *
     * @param user the SSH tunnel username.
     * @param hostname the SSH tunnel hostname.
     * @param privateKeyFile the SSH tunnel private key file path.
     * @param remoteHostname the hostname of the remote server.
     *
     * @return a new {@link DocumentDbSshTunnelServerBuilder} instance.
     */
    public static DocumentDbSshTunnelServerBuilder builder(
            final String user, final String hostname, final String privateKeyFile, final String remoteHostname) {
        return new DocumentDbSshTunnelServerBuilder(user, hostname, privateKeyFile, remoteHostname);
    }

    /**
     * The {@link DocumentDbSshTunnelServer} builder class.
     * A call to the {@link #build()} method returns the single instance with
     * the matching SSH tunnel properties.
     */
    public static class DocumentDbSshTunnelServerBuilder {
        private final String sshUser;
        private final String sshHostname;
        private final String sshPrivateKeyFile;
        private final String sshRemoteHostname;
        private String sshPrivateKeyPassphrase = null;
        private boolean sshStrictHostKeyChecking = true;
        private String sshKnownHostsFile = null;

        // Shared registry keyed by property hash so equal configurations share one server.
        private static final ConcurrentMap<String, DocumentDbSshTunnelServer> SSH_TUNNEL_MAP =
                new ConcurrentHashMap<>();

        /**
         * A builder class for the DocumentDbSshTunnelServer.
         *
         * @param sshUser the SSH tunnel username.
         * @param sshHostname the SSH tunnel hostname.
         * @param sshPrivateKeyFile the SSH tunnel private key file path.
         * @param sshRemoteHostname the hostname of the remote server.
         */
        DocumentDbSshTunnelServerBuilder(
                final String sshUser,
                final String sshHostname,
                final String sshPrivateKeyFile,
                final String sshRemoteHostname) {
            this.sshUser = sshUser;
            this.sshHostname = sshHostname;
            this.sshPrivateKeyFile = sshPrivateKeyFile;
            this.sshRemoteHostname = sshRemoteHostname;
        }

        /**
         * Sets the private key passphrase.
         *
         * @param sshPrivateKeyPassphrase the private key passphrase.
         * @return the current instance of the builder.
         */
        public DocumentDbSshTunnelServerBuilder sshPrivateKeyPassphrase(final String sshPrivateKeyPassphrase) {
            this.sshPrivateKeyPassphrase = sshPrivateKeyPassphrase;
            return this;
        }

        /**
         * Sets the strict host key checking option.
         *
         * @param sshStrictHostKeyChecking indicator of whether to set the strict host key checking option.
         * @return the current instance of the builder.
         */
        public DocumentDbSshTunnelServerBuilder sshStrictHostKeyChecking(final boolean sshStrictHostKeyChecking) {
            this.sshStrictHostKeyChecking = sshStrictHostKeyChecking;
            return this;
        }

        /**
         * Sets the known hosts file property.
         *
         * @param sshKnownHostsFile the file path to the known hosts file.
         *
         * @return the current instance of the builder.
         */
        public DocumentDbSshTunnelServerBuilder sshKnownHostsFile(final String sshKnownHostsFile) {
            this.sshKnownHostsFile = sshKnownHostsFile;
            return this;
        }

        /**
         * Builds a DocumentDbSshTunnelServer from the given properties.
         *
         * @return a new instance of DocumentDbSshTunnelServer.
         */
        public DocumentDbSshTunnelServer build() {
            final String hashString = getHashString(
                    this.sshUser,
                    this.sshHostname,
                    this.sshPrivateKeyFile,
                    this.sshRemoteHostname
            );
            // Returns single instance of server for the hashed properties.
            return SSH_TUNNEL_MAP.computeIfAbsent(
                    hashString,
                    key -> new DocumentDbSshTunnelServer(this)
            );
        }
    }

    // Rebuilds a connection-properties object from the server's immutable fields.
    @NonNull private DocumentDbConnectionProperties getConnectionProperties() {
        final DocumentDbConnectionProperties connectionProperties = new DocumentDbConnectionProperties();
        connectionProperties.setHostname(remoteHostname);
        connectionProperties.setSshUser(sshUser);
        connectionProperties.setSshHostname(sshHostname);
        connectionProperties.setSshPrivateKeyFile(sshPrivateKeyFile);
        connectionProperties.setSshStrictHostKeyChecking(String.valueOf(sshStrictHostKeyChecking));
        if (sshPrivateKeyPassphrase != null) {
            connectionProperties.setSshPrivateKeyPassphrase(sshPrivateKeyPassphrase);
        }
        if (sshKnownHostsFile != null) {
            connectionProperties.setSshKnownHostsFile(sshKnownHostsFile);
        }
        return connectionProperties;
    }

    private void validateLocalSshFilesExists() throws SQLException {
        final DocumentDbConnectionProperties connectionProperties = getConnectionProperties();
        validateSshPrivateKeyFile(connectionProperties);
        getSshKnownHostsFilename(connectionProperties);
    }

    static void validateSshPrivateKeyFile(final DocumentDbConnectionProperties connectionProperties)
            throws SQLException {
        if (!connectionProperties.isSshPrivateKeyFileExists()) {
            throw SqlError.createSQLException(
                    LOGGER,
                    SqlState.CONNECTION_EXCEPTION,
                    SqlError.SSH_PRIVATE_KEY_FILE_NOT_FOUND,
                    connectionProperties.getSshPrivateKeyFile());
        }
    }

    static String getSshKnownHostsFilename(final DocumentDbConnectionProperties connectionProperties)
            throws SQLException {
        final String knowHostsFilename;
        if (!isNullOrWhitespace(connectionProperties.getSshKnownHostsFile())) {
            final Path knownHostsPath = getPath(connectionProperties.getSshKnownHostsFile());
            validateSshKnownHostsFile(connectionProperties, knownHostsPath);
            knowHostsFilename = knownHostsPath.toString();
        } else {
            knowHostsFilename = getPath(SSH_KNOWN_HOSTS_FILE).toString();
        }
        return knowHostsFilename;
    }

    private static void validateSshKnownHostsFile(
            final DocumentDbConnectionProperties connectionProperties,
            final Path knownHostsPath) throws SQLException {
        if (!Files.exists(knownHostsPath)) {
            throw SqlError.createSQLException(
                    LOGGER,
                    SqlState.INVALID_PARAMETER_VALUE,
                    SqlError.KNOWN_HOSTS_FILE_NOT_FOUND,
                    connectionProperties.getSshKnownHostsFile());
        }
    }

    /**
     * Container for the SSH port forwarding tunnel session.
     */
    @Getter
    @AllArgsConstructor
    static class SshPortForwardingSession {
        /**
         * Gets the SSH session.
         */
        private final Session session;

        /**
         * Gets the local port for the port forwarding tunnel.
         */
        private final int localPort;
    }
}
4,599